
Commit

[SPARK-50015][BUILD] Upgrade grpcio* to 1.67.0 and grpc-java to 1.67.1
dongjoon-hyun committed Oct 18, 2024
1 parent b1d1f10 commit eb8898c
Showing 11 changed files with 39 additions and 18 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build_and_test.yml
@@ -276,7 +276,7 @@ jobs:
- name: Install Python packages (Python 3.11)
if: (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-')) || contains(matrix.modules, 'connect')
run: |
-python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.62.0' 'grpcio-status==1.62.0' 'protobuf==4.25.1'
+python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==5.28.2'
python3.11 -m pip list
# Run the tests.
- name: Run tests
@@ -725,7 +725,7 @@ jobs:
python3.9 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
ipython ipython_genutils sphinx_plotly_directive 'numpy==1.26.4' pyarrow pandas 'plotly>=4.8' 'docutils<0.18.0' \
'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.9.1' \
-'pandas-stubs==1.2.0.53' 'grpcio==1.62.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
python3.9 -m pip list
- name: Python linter
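Not part of the commit: a minimal Python sketch for sanity-checking that an environment picked up the pins this workflow installs. The package names and versions come from the diff above; the check itself is an illustrative assumption, not Spark tooling.

# Hypothetical sanity check, not Spark code: verify the installed
# Spark Connect dependencies match the pins in the workflow step above.
from importlib.metadata import version

expected = {"grpcio": "1.67.0", "grpcio-status": "1.67.0", "protobuf": "5.28.2"}
for pkg, want in expected.items():
    got = version(pkg)
    assert got == want, f"{pkg}: expected {want}, found {got}"
print("All Spark Connect pins match.")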
2 changes: 1 addition & 1 deletion .github/workflows/maven_test.yml
@@ -178,7 +178,7 @@ jobs:
- name: Install Python packages (Python 3.11)
if: (contains(matrix.modules, 'sql#core')) || contains(matrix.modules, 'connect')
run: |
-python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.62.0' 'grpcio-status==1.62.0' 'protobuf==4.25.1'
+python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==4.25.1'
python3.11 -m pip list
# Run the tests.
- name: Run tests
4 changes: 2 additions & 2 deletions dev/create-release/spark-rm/Dockerfile
@@ -102,7 +102,7 @@ RUN pypy3 -m pip install numpy 'six==1.16.0' 'pandas==2.2.2' scipy coverage matp

ARG BASIC_PIP_PKGS="numpy pyarrow>=15.0.0 six==1.16.0 pandas==2.2.2 scipy plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2 twine==3.4.1"
# Python deps for Spark Connect
ARG CONNECT_PIP_PKGS="grpcio==1.62.0 grpcio-status==1.62.0 protobuf==4.25.1 googleapis-common-protos==1.56.4"
ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==4.25.1 googleapis-common-protos==1.56.4"

# Install Python 3.10 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
@@ -131,7 +131,7 @@ RUN python3.9 -m pip install --force $BASIC_PIP_PKGS unittest-xml-reporting $CON
RUN python3.9 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.20.0' pyarrow pandas 'plotly>=4.8' 'docutils<0.18.0' \
'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.9.1' \
-'pandas-stubs==1.2.0.53' 'grpcio==1.62.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
RUN python3.9 -m pip list

2 changes: 1 addition & 1 deletion dev/infra/Dockerfile
@@ -96,7 +96,7 @@ RUN pypy3 -m pip install numpy 'six==1.16.0' 'pandas==2.2.3' scipy coverage matp

ARG BASIC_PIP_PKGS="numpy pyarrow>=15.0.0 six==1.16.0 pandas==2.2.3 scipy plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2"
# Python deps for Spark Connect
ARG CONNECT_PIP_PKGS="grpcio==1.62.0 grpcio-status==1.62.0 protobuf==4.25.1 googleapis-common-protos==1.56.4 graphviz==0.20.3"
ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.28.2 googleapis-common-protos==1.65.0 graphviz==0.20.3"

# Install Python 3.10 packages
RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
6 changes: 3 additions & 3 deletions dev/requirements.txt
@@ -58,9 +58,9 @@ black==23.9.1
py

# Spark Connect (required)
-grpcio>=1.62.0
-grpcio-status>=1.62.0
-googleapis-common-protos>=1.56.4
+grpcio>=1.67.0
+grpcio-status>=1.67.0
+googleapis-common-protos>=1.65.0

# Spark Connect python proto generation plugin (optional)
mypy-protobuf==3.3.0
2 changes: 1 addition & 1 deletion pom.xml
@@ -294,7 +294,7 @@
<!-- Version used in Connect -->
<connect.guava.version>33.2.1-jre</connect.guava.version>
<guava.failureaccess.version>1.0.2</guava.failureaccess.version>
-<io.grpc.version>1.62.2</io.grpc.version>
+<io.grpc.version>1.67.1</io.grpc.version>
<mima.version>1.1.4</mima.version>
<tomcat.annotations.api.version>6.0.53</tomcat.annotations.api.version>

2 changes: 1 addition & 1 deletion project/SparkBuild.scala
@@ -91,7 +91,7 @@ object BuildCommons {
// SPARK-41247: needs to be consistent with `protobuf.version` in `pom.xml`.
val protoVersion = "3.25.5"
// GRPC version used for Spark Connect.
val grpcVersion = "1.62.2"
val grpcVersion = "1.67.1"
}

object SparkBuild extends PomBuild {
6 changes: 3 additions & 3 deletions python/docs/source/getting_started/install.rst
@@ -208,9 +208,9 @@ Package Supported version Note
========================== ================= ==========================
`pandas` >=2.0.0 Required for Spark Connect
`pyarrow` >=10.0.0 Required for Spark Connect
-`grpcio` >=1.62.0 Required for Spark Connect
-`grpcio-status` >=1.62.0 Required for Spark Connect
-`googleapis-common-protos` >=1.56.4 Required for Spark Connect
+`grpcio` >=1.67.0 Required for Spark Connect
+`grpcio-status` >=1.67.0 Required for Spark Connect
+`googleapis-common-protos` >=1.65.0 Required for Spark Connect
`graphviz` >=0.20 Optional for Spark Connect
========================== ================= ==========================

4 changes: 2 additions & 2 deletions python/packaging/classic/setup.py
@@ -153,8 +153,8 @@ def _supports_symlinks():
_minimum_pandas_version = "2.0.0"
_minimum_numpy_version = "1.21"
_minimum_pyarrow_version = "10.0.0"
_minimum_grpc_version = "1.62.0"
_minimum_googleapis_common_protos_version = "1.56.4"
_minimum_grpc_version = "1.67.0"
_minimum_googleapis_common_protos_version = "1.65.0"


class InstallCommand(install):
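Not from this commit: a hedged sketch of how minimum-version constants like those above typically become pip requirement specifiers for an optional extra. The constant names mirror the diff; the wiring is an assumption for illustration, not Spark's actual setup.py.

# Hypothetical wiring, not Spark's actual setup.py: turn the minimum-version
# constants into requirement strings for a "connect"-style extra.
_minimum_grpc_version = "1.67.0"
_minimum_googleapis_common_protos_version = "1.65.0"

connect_requires = [
    f"grpcio>={_minimum_grpc_version}",
    f"grpcio-status>={_minimum_grpc_version}",
    f"googleapis-common-protos>={_minimum_googleapis_common_protos_version}",
]
print(connect_requires)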
21 changes: 21 additions & 0 deletions python/pyspark/sql/connect/proto/base_pb2_grpc.py
@@ -34,51 +34,61 @@ def __init__(self, channel):
"/spark.connect.SparkConnectService/ExecutePlan",
request_serializer=spark_dot_connect_dot_base__pb2.ExecutePlanRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ExecutePlanResponse.FromString,
+_registered_method=True,
)
self.AnalyzePlan = channel.unary_unary(
"/spark.connect.SparkConnectService/AnalyzePlan",
request_serializer=spark_dot_connect_dot_base__pb2.AnalyzePlanRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.AnalyzePlanResponse.FromString,
+_registered_method=True,
)
self.Config = channel.unary_unary(
"/spark.connect.SparkConnectService/Config",
request_serializer=spark_dot_connect_dot_base__pb2.ConfigRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ConfigResponse.FromString,
+_registered_method=True,
)
self.AddArtifacts = channel.stream_unary(
"/spark.connect.SparkConnectService/AddArtifacts",
request_serializer=spark_dot_connect_dot_base__pb2.AddArtifactsRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.AddArtifactsResponse.FromString,
+_registered_method=True,
)
self.ArtifactStatus = channel.unary_unary(
"/spark.connect.SparkConnectService/ArtifactStatus",
request_serializer=spark_dot_connect_dot_base__pb2.ArtifactStatusesRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ArtifactStatusesResponse.FromString,
+_registered_method=True,
)
self.Interrupt = channel.unary_unary(
"/spark.connect.SparkConnectService/Interrupt",
request_serializer=spark_dot_connect_dot_base__pb2.InterruptRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.InterruptResponse.FromString,
+_registered_method=True,
)
self.ReattachExecute = channel.unary_stream(
"/spark.connect.SparkConnectService/ReattachExecute",
request_serializer=spark_dot_connect_dot_base__pb2.ReattachExecuteRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ExecutePlanResponse.FromString,
+_registered_method=True,
)
self.ReleaseExecute = channel.unary_unary(
"/spark.connect.SparkConnectService/ReleaseExecute",
request_serializer=spark_dot_connect_dot_base__pb2.ReleaseExecuteRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ReleaseExecuteResponse.FromString,
+_registered_method=True,
)
self.ReleaseSession = channel.unary_unary(
"/spark.connect.SparkConnectService/ReleaseSession",
request_serializer=spark_dot_connect_dot_base__pb2.ReleaseSessionRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.ReleaseSessionResponse.FromString,
+_registered_method=True,
)
self.FetchErrorDetails = channel.unary_unary(
"/spark.connect.SparkConnectService/FetchErrorDetails",
request_serializer=spark_dot_connect_dot_base__pb2.FetchErrorDetailsRequest.SerializeToString,
response_deserializer=spark_dot_connect_dot_base__pb2.FetchErrorDetailsResponse.FromString,
+_registered_method=True,
)


@@ -220,6 +230,7 @@ def add_SparkConnectServiceServicer_to_server(servicer, server):
"spark.connect.SparkConnectService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+server.add_registered_method_handlers("spark.connect.SparkConnectService", rpc_method_handlers)


# This class is part of an EXPERIMENTAL API.
@@ -253,6 +264,7 @@ def ExecutePlan(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -282,6 +294,7 @@ def AnalyzePlan(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -311,6 +324,7 @@ def Config(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -340,6 +354,7 @@ def AddArtifacts(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -369,6 +384,7 @@ def ArtifactStatus(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -398,6 +414,7 @@ def Interrupt(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -427,6 +444,7 @@ def ReattachExecute(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -456,6 +474,7 @@ def ReleaseExecute(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -485,6 +504,7 @@ def ReleaseSession(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)

@staticmethod
@@ -514,4 +534,5 @@ def FetchErrorDetails(
wait_for_ready,
timeout,
metadata,
+_registered_method=True,
)
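For context, a minimal sketch of the registered-method call style the regenerated stub above now uses, assuming grpcio>=1.67.0; the endpoint is a placeholder and the snippet is illustrative, not part of the commit.

# Minimal sketch, not from the commit: build one registered-method callable
# the same way the regenerated SparkConnectServiceStub does.
import grpc
from pyspark.sql.connect.proto import base_pb2  # generated message classes

channel = grpc.insecure_channel("localhost:15002")  # assumed local endpoint
config_call = channel.unary_unary(
    "/spark.connect.SparkConnectService/Config",
    request_serializer=base_pb2.ConfigRequest.SerializeToString,
    response_deserializer=base_pb2.ConfigResponse.FromString,
    _registered_method=True,  # the flag added throughout the regenerated stub
)
# config_call(base_pb2.ConfigRequest(...)) would then issue the RPC.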
4 changes: 2 additions & 2 deletions sql/connect/common/src/main/buf.gen.yaml
@@ -22,14 +22,14 @@ plugins:
out: gen/proto/csharp
- plugin: buf.build/protocolbuffers/java:v21.7
out: gen/proto/java
-- plugin: buf.build/grpc/ruby:v1.62.0
+- plugin: buf.build/grpc/ruby:v1.67.0
out: gen/proto/ruby
- plugin: buf.build/protocolbuffers/ruby:v21.7
out: gen/proto/ruby
# Building the Python build and building the mypy interfaces.
- plugin: buf.build/protocolbuffers/python:v21.7
out: gen/proto/python
-- plugin: buf.build/grpc/python:v1.62.0
+- plugin: buf.build/grpc/python:v1.67.0
out: gen/proto/python
- name: mypy
out: gen/proto/python
