Skip to content

Commit

Permalink
Drop support for python 3.7 (#1334)
Browse files Browse the repository at this point in the history
Drop support for Python 3.7 and upgrade min version of some dependencies
  • Loading branch information
luong-komorebi authored Sep 24, 2023
1 parent 9424e3f commit 656e3c2
Show file tree
Hide file tree
Showing 10 changed files with 29 additions and 37 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
python-version: ["3.8", "3.9", "3.10", "3.11"]
steps:
- name: Checkout
uses: actions/checkout@v4
Expand Down Expand Up @@ -93,7 +93,7 @@ jobs:
- uses: actions/checkout@v4
- uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
with:
python_version: "3.7"
python_version: "3.8"
- uses: jupyterlab/maintainer-tools/.github/actions/install-minimums@v1
- name: Run the unit tests
run: |
Expand Down
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ repos:
[mdformat-gfm, mdformat-frontmatter, mdformat-footnote]

- repo: https://github.com/psf/black
rev: 23.3.0
rev: 23.9.1
hooks:
- id: black

Expand Down
4 changes: 2 additions & 2 deletions docs/source/developers/kernel-specification.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ Here's an example from the [`spark_python_yarn_cluster`](https://github.com/jupy
"env": {
"SPARK_HOME": "/usr/hdp/current/spark2-client",
"PYSPARK_PYTHON": "/opt/conda/bin/python",
"PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH ${KERNEL_EXTRA_SPARK_OPTS}",
"PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH ${KERNEL_EXTRA_SPARK_OPTS}",
"LAUNCH_OPTS": ""
},
"argv": [
Expand Down
6 changes: 3 additions & 3 deletions docs/source/developers/rest-api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ the icon filenames to be used by the front-end application.
"env": {
"SPARK_HOME": "/usr/hdp/current/spark2-client",
"PYSPARK_PYTHON": "/opt/conda/bin/python",
"PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"SPARK_OPTS": "--master yarn --deploy-mode client --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} ${KERNEL_EXTRA_SPARK_OPTS}",
"LAUNCH_OPTS": ""
},
Expand Down Expand Up @@ -215,8 +215,8 @@ the icon filenames to be used by the front-end application.
"env": {
"SPARK_HOME": "/usr/hdp/current/spark2-client",
"PYSPARK_PYTHON": "/opt/conda/bin/python",
"PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH ${KERNEL_EXTRA_SPARK_OPTS}",
"PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH ${KERNEL_EXTRA_SPARK_OPTS}",
"LAUNCH_OPTS": ""
},
"display_name": "Spark - Python (YARN Cluster Mode)",
Expand Down
29 changes: 11 additions & 18 deletions docs/source/other/troubleshooting.md
Original file line number Diff line number Diff line change
Expand Up @@ -125,19 +125,19 @@ Scenario: **I'm trying to launch a (Python/Scala/R) kernel, but it failed with `

```
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/tornado/web.py", line 1512, in _execute
File "/opt/conda/lib/python3.8/site-packages/tornado/web.py", line 1512, in _execute
result = yield result
File "/opt/conda/lib/python3.7/site-packages/tornado/gen.py", line 1055, in run
File "/opt/conda/lib/python3.8/site-packages/tornado/gen.py", line 1055, in run
value = future.result()
...
...
File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/kernels/remotemanager.py", line 125, in _launch_kernel
File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/kernels/remotemanager.py", line 125, in _launch_kernel
return self.process_proxy.launch_process(kernel_cmd, **kw)
File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/yarn.py", line 63, in launch_process
File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/yarn.py", line 63, in launch_process
self.confirm_remote_startup(kernel_cmd, **kw)
File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/yarn.py", line 174, in confirm_remote_startup
File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/yarn.py", line 174, in confirm_remote_startup
ready_to_connect = self.receive_connection_info()
File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 565, in receive_connection_info
File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 565, in receive_connection_info
raise e
TypeError: Incorrect padding
```
Expand Down Expand Up @@ -166,17 +166,17 @@ Scenario: **I'm trying to launch a (Python/Scala/R) kernel with port range, but

```
Traceback (most recent call last):
File "/opt/conda/lib/python3.7/site-packages/tornado/web.py", line 1511, in _execute
File "/opt/conda/lib/python3.8/site-packages/tornado/web.py", line 1511, in _execute
result = yield result
File "/opt/conda/lib/python3.7/site-packages/tornado/gen.py", line 1055, in run
File "/opt/conda/lib/python3.8/site-packages/tornado/gen.py", line 1055, in run
value = future.result()
....
....
File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 478, in __init__
File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 478, in __init__
super(RemoteProcessProxy, self).__init__(kernel_manager, proxy_config)
File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 87, in __init__
File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 87, in __init__
self._validate_port_range(proxy_config)
File "/opt/conda/lib/python3.7/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 407, in _validate_port_range
File "/opt/conda/lib/python3.8/site-packages/enterprise_gateway/services/processproxies/processproxy.py", line 407, in _validate_port_range
"port numbers is (1024, 65535).".format(self.lower_port))
RuntimeError: Invalid port range '1000..2000' specified. Range for valid port numbers is (1024, 65535).
```
Expand Down Expand Up @@ -214,13 +214,6 @@ This is usually seen when you are trying to use more resources then what is avai
To address this issue, increase the amount of memory available for your Hadoop YARN application or another
resource manager managing the kernel. For example, on Kubernetes, this may be a time when the kernel specification's [kernel-pod.yaml.j2](https://github.com/jupyter-server/enterprise_gateway/blob/main/etc/kernel-launchers/kubernetes/scripts/kernel-pod.yaml.j2) file should be extended with resource quotas.

## Spark and Python Versions

Scenario: **PySpark 2.4.x fails on Python 3.8**

PySpark 2.4.x fails on Python 3.8 as described in [SPARK-29536](https://issues.apache.org/jira/browse/SPARK-29536).
Use Python 3.7.x as the issue only seems to have been resolved on Spark 3.0.

## Kerberos

Scenario: **I'm trying to use a notebook with user impersonation on a Kerberos enabled cluster, but it fails to authenticate.**
Expand Down
2 changes: 1 addition & 1 deletion etc/docker/demo-base/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
- Hadoop 2.7.7
- Apache Spark 2.4.6
- Java 1.8 runtime
- Mini-conda latest (python 3.7) with R packages
- Mini-conda latest (python 3.8) with R packages
- Toree 0.4.0-incubating
- `jovyan` service user, with system users `elyra`, `bob`, and `alice`. The jovyan uid is `1000` to match other jupyter
images.
Expand Down
2 changes: 1 addition & 1 deletion etc/kernelspecs/spark_python_yarn_client/kernel.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
"env": {
"SPARK_HOME": "/usr/hdp/current/spark2-client",
"PYSPARK_PYTHON": "/opt/conda/bin/python",
"PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"SPARK_OPTS": "--master yarn --deploy-mode client --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} ${KERNEL_EXTRA_SPARK_OPTS}",
"LAUNCH_OPTS": ""
},
Expand Down
4 changes: 2 additions & 2 deletions etc/kernelspecs/spark_python_yarn_cluster/kernel.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@
"env": {
"SPARK_HOME": "/usr/hdp/current/spark2-client",
"PYSPARK_PYTHON": "/opt/conda/bin/python",
"PYTHONPATH": "${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.7/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH --conf spark.yarn.maxAppAttempts=1 ${KERNEL_EXTRA_SPARK_OPTS}",
"PYTHONPATH": "${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip",
"SPARK_OPTS": "--master yarn --deploy-mode cluster --name ${KERNEL_ID:-ERROR__NO__KERNEL_ID} --conf spark.yarn.submit.waitAppCompletion=false --conf spark.yarn.appMasterEnv.PYTHONUSERBASE=/home/${KERNEL_USERNAME}/.local --conf spark.yarn.appMasterEnv.PYTHONPATH=${HOME}/.local/lib/python3.8/site-packages:/usr/hdp/current/spark2-client/python:/usr/hdp/current/spark2-client/python/lib/py4j-0.10.6-src.zip --conf spark.yarn.appMasterEnv.PATH=/opt/conda/bin:$PATH --conf spark.yarn.maxAppAttempts=1 ${KERNEL_EXTRA_SPARK_OPTS}",
"LAUNCH_OPTS": ""
},
"argv": [
Expand Down
9 changes: 4 additions & 5 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,25 +15,24 @@ classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
]
requires-python = ">=3.7"
requires-python = ">=3.8"
dependencies = [
"docker>=3.5.0",
"future",
"jinja2>=3.1",
"jupyter_client>=6.1.12,<7", # Remove cap once EG supports kernel provisioners
"jupyter_core>=4.7.0",
"kubernetes>=18.20.0",
"jupyter_server>=1.3,<2.0", # Remove cap (increase floor) once EG supports kernel provisioners
"jupyter_server>=1.7,<2.0", # Remove cap (increase floor) once EG supports kernel provisioners
"paramiko>=2.11",
"pexpect>=4.8.0",
"pycryptodomex>=3.9.7",
"pyzmq>=17.0,<25.0", # Pyzmq 25 removes deprecated code that jupyter_client 6 uses, remove if v6 gets updated
"pyzmq>=20.0,<25.0", # Pyzmq 25 removes deprecated code that jupyter_client 6 uses, remove if v6 gets updated
"requests>=2.14.2",
"tornado>=6.1",
"traitlets>=5.3.0",
Expand Down Expand Up @@ -65,7 +64,7 @@ test = [
"websocket-client"
]
lint = [
"black[jupyter]==23.3.0",
"black[jupyter]==23.9.1",
"mdformat>0.7",
"mdformat-gfm>=0.3.5",
"ruff==0.0.290"
Expand Down
4 changes: 2 additions & 2 deletions requirements.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@ dependencies:
- jinja2>=3.1
- jupyter_client>=6.1
- jupyter_core>=4.6.0
- jupyter_server>=1.2
- jupyter_server>=1.7
- paramiko>=2.1.2
- pexpect>=4.2.0
- pip
- pre-commit
- pycryptodomex>=3.9.7
- python-kubernetes>=18.20.0
- pyzmq>=17.0.0
- pyzmq>=20.0.0
- requests>=2.7,<3.0
- tornado>=6.1
- traitlets>=4.2.0
Expand Down

0 comments on commit 656e3c2

Please sign in to comment.