Commit eb8898c
[SPARK-50015][BUILD] Upgrade grpcio* to 1.67.0 and grpc-java to 1.67.1
1 parent b1d1f10 commit eb8898c

11 files changed (+39, -18 lines)

.github/workflows/build_and_test.yml

Lines changed: 2 additions & 2 deletions
@@ -276,7 +276,7 @@ jobs:
       - name: Install Python packages (Python 3.11)
         if: (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-')) || contains(matrix.modules, 'connect')
         run: |
-          python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.62.0' 'grpcio-status==1.62.0' 'protobuf==4.25.1'
+          python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==5.28.2'
           python3.11 -m pip list
       # Run the tests.
       - name: Run tests
@@ -725,7 +725,7 @@ jobs:
           python3.9 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
             ipython ipython_genutils sphinx_plotly_directive 'numpy==1.26.4' pyarrow pandas 'plotly>=4.8' 'docutils<0.18.0' \
             'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.9.1' \
-            'pandas-stubs==1.2.0.53' 'grpcio==1.62.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+            'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
             'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
           python3.9 -m pip list
       - name: Python linter

.github/workflows/maven_test.yml

Lines changed: 1 addition & 1 deletion
@@ -178,7 +178,7 @@ jobs:
       - name: Install Python packages (Python 3.11)
         if: (contains(matrix.modules, 'sql#core')) || contains(matrix.modules, 'connect')
         run: |
-          python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.62.0' 'grpcio-status==1.62.0' 'protobuf==4.25.1'
+          python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==4.25.1'
           python3.11 -m pip list
       # Run the tests.
       - name: Run tests

dev/create-release/spark-rm/Dockerfile

Lines changed: 2 additions & 2 deletions
@@ -102,7 +102,7 @@ RUN pypy3 -m pip install numpy 'six==1.16.0' 'pandas==2.2.2' scipy coverage matp
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=15.0.0 six==1.16.0 pandas==2.2.2 scipy plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2 twine==3.4.1"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.62.0 grpcio-status==1.62.0 protobuf==4.25.1 googleapis-common-protos==1.56.4"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==4.25.1 googleapis-common-protos==1.56.4"
 
 # Install Python 3.10 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
@@ -131,7 +131,7 @@ RUN python3.9 -m pip install --force $BASIC_PIP_PKGS unittest-xml-reporting $CON
 RUN python3.9 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
     ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.20.0' pyarrow pandas 'plotly>=4.8' 'docutils<0.18.0' \
     'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.9.1' \
-    'pandas-stubs==1.2.0.53' 'grpcio==1.62.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+    'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
     'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
 RUN python3.9 -m pip list
 

dev/infra/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ RUN pypy3 -m pip install numpy 'six==1.16.0' 'pandas==2.2.3' scipy coverage matp
 
 ARG BASIC_PIP_PKGS="numpy pyarrow>=15.0.0 six==1.16.0 pandas==2.2.3 scipy plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.62.0 grpcio-status==1.62.0 protobuf==4.25.1 googleapis-common-protos==1.56.4 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==5.28.2 googleapis-common-protos==1.65.0 graphviz==0.20.3"
 
 # Install Python 3.10 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
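
Note: unlike the release Dockerfile above, this CI image also bumps protobuf to 5.28.2 and googleapis-common-protos to 1.65.0 alongside grpcio 1.67.0. A quick way to sanity-check a built image against the CONNECT_PIP_PKGS pins (illustrative sketch only, not part of this commit) is to compare the installed distribution versions:

    # Illustrative only: verify the CONNECT_PIP_PKGS pins inside the built image.
    from importlib.metadata import version

    expected = {
        "grpcio": "1.67.0",
        "grpcio-status": "1.67.0",
        "protobuf": "5.28.2",
        "googleapis-common-protos": "1.65.0",
    }
    for pkg, want in expected.items():
        got = version(pkg)
        assert got == want, f"{pkg}: expected {want}, got {got}"
    print("Spark Connect pins OK")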

dev/requirements.txt

Lines changed: 3 additions & 3 deletions
@@ -58,9 +58,9 @@ black==23.9.1
 py
 
 # Spark Connect (required)
-grpcio>=1.62.0
-grpcio-status>=1.62.0
-googleapis-common-protos>=1.56.4
+grpcio>=1.67.0
+grpcio-status>=1.67.0
+googleapis-common-protos>=1.65.0
 
 # Spark Connect python proto generation plugin (optional)
 mypy-protobuf==3.3.0

pom.xml

Lines changed: 1 addition & 1 deletion
@@ -294,7 +294,7 @@
     <!-- Version used in Connect -->
     <connect.guava.version>33.2.1-jre</connect.guava.version>
     <guava.failureaccess.version>1.0.2</guava.failureaccess.version>
-    <io.grpc.version>1.62.2</io.grpc.version>
+    <io.grpc.version>1.67.1</io.grpc.version>
     <mima.version>1.1.4</mima.version>
     <tomcat.annotations.api.version>6.0.53</tomcat.annotations.api.version>
 

project/SparkBuild.scala

Lines changed: 1 addition & 1 deletion
@@ -91,7 +91,7 @@ object BuildCommons {
   // SPARK-41247: needs to be consistent with `protobuf.version` in `pom.xml`.
   val protoVersion = "3.25.5"
   // GRPC version used for Spark Connect.
-  val grpcVersion = "1.62.2"
+  val grpcVersion = "1.67.1"
 }
 
 object SparkBuild extends PomBuild {

python/docs/source/getting_started/install.rst

Lines changed: 3 additions & 3 deletions
@@ -208,9 +208,9 @@ Package Supported version Note
 ========================== ================= ==========================
 `pandas`                   >=2.0.0           Required for Spark Connect
 `pyarrow`                  >=10.0.0          Required for Spark Connect
-`grpcio`                   >=1.62.0          Required for Spark Connect
-`grpcio-status`            >=1.62.0          Required for Spark Connect
-`googleapis-common-protos` >=1.56.4          Required for Spark Connect
+`grpcio`                   >=1.67.0          Required for Spark Connect
+`grpcio-status`            >=1.67.0          Required for Spark Connect
+`googleapis-common-protos` >=1.65.0          Required for Spark Connect
 `graphviz`                 >=0.20            Optional for Spark Connect
 ========================== ================= ==========================
 

python/packaging/classic/setup.py

Lines changed: 2 additions & 2 deletions
@@ -153,8 +153,8 @@ def _supports_symlinks():
 _minimum_pandas_version = "2.0.0"
 _minimum_numpy_version = "1.21"
 _minimum_pyarrow_version = "10.0.0"
-_minimum_grpc_version = "1.62.0"
-_minimum_googleapis_common_protos_version = "1.56.4"
+_minimum_grpc_version = "1.67.0"
+_minimum_googleapis_common_protos_version = "1.65.0"
 
 
 class InstallCommand(install):
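
Note: these constants feed the requirement strings that `pip install pyspark[connect]` resolves. A minimal sketch of how the bumped minimums become pip bounds (assumed wiring for illustration; the actual extras_require code sits later in setup.py and is not part of this hunk):

    # Sketch only: how the new minimums translate into requirement strings.
    _minimum_grpc_version = "1.67.0"
    _minimum_googleapis_common_protos_version = "1.65.0"

    connect_requires = [
        "grpcio>=%s" % _minimum_grpc_version,
        "grpcio-status>=%s" % _minimum_grpc_version,
        "googleapis-common-protos>=%s" % _minimum_googleapis_common_protos_version,
    ]
    print(connect_requires)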

python/pyspark/sql/connect/proto/base_pb2_grpc.py

Lines changed: 21 additions & 0 deletions
@@ -34,51 +34,61 @@ def __init__(self, channel):
             "/spark.connect.SparkConnectService/ExecutePlan",
             request_serializer=spark_dot_connect_dot_base__pb2.ExecutePlanRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.ExecutePlanResponse.FromString,
+            _registered_method=True,
         )
         self.AnalyzePlan = channel.unary_unary(
             "/spark.connect.SparkConnectService/AnalyzePlan",
             request_serializer=spark_dot_connect_dot_base__pb2.AnalyzePlanRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.AnalyzePlanResponse.FromString,
+            _registered_method=True,
         )
         self.Config = channel.unary_unary(
             "/spark.connect.SparkConnectService/Config",
             request_serializer=spark_dot_connect_dot_base__pb2.ConfigRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.ConfigResponse.FromString,
+            _registered_method=True,
         )
         self.AddArtifacts = channel.stream_unary(
             "/spark.connect.SparkConnectService/AddArtifacts",
             request_serializer=spark_dot_connect_dot_base__pb2.AddArtifactsRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.AddArtifactsResponse.FromString,
+            _registered_method=True,
         )
         self.ArtifactStatus = channel.unary_unary(
             "/spark.connect.SparkConnectService/ArtifactStatus",
             request_serializer=spark_dot_connect_dot_base__pb2.ArtifactStatusesRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.ArtifactStatusesResponse.FromString,
+            _registered_method=True,
         )
         self.Interrupt = channel.unary_unary(
             "/spark.connect.SparkConnectService/Interrupt",
             request_serializer=spark_dot_connect_dot_base__pb2.InterruptRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.InterruptResponse.FromString,
+            _registered_method=True,
         )
         self.ReattachExecute = channel.unary_stream(
             "/spark.connect.SparkConnectService/ReattachExecute",
             request_serializer=spark_dot_connect_dot_base__pb2.ReattachExecuteRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.ExecutePlanResponse.FromString,
+            _registered_method=True,
         )
         self.ReleaseExecute = channel.unary_unary(
             "/spark.connect.SparkConnectService/ReleaseExecute",
             request_serializer=spark_dot_connect_dot_base__pb2.ReleaseExecuteRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.ReleaseExecuteResponse.FromString,
+            _registered_method=True,
         )
         self.ReleaseSession = channel.unary_unary(
             "/spark.connect.SparkConnectService/ReleaseSession",
             request_serializer=spark_dot_connect_dot_base__pb2.ReleaseSessionRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.ReleaseSessionResponse.FromString,
+            _registered_method=True,
         )
         self.FetchErrorDetails = channel.unary_unary(
             "/spark.connect.SparkConnectService/FetchErrorDetails",
             request_serializer=spark_dot_connect_dot_base__pb2.FetchErrorDetailsRequest.SerializeToString,
             response_deserializer=spark_dot_connect_dot_base__pb2.FetchErrorDetailsResponse.FromString,
+            _registered_method=True,
         )
 
 
@@ -220,6 +230,7 @@ def add_SparkConnectServiceServicer_to_server(servicer, server):
         "spark.connect.SparkConnectService", rpc_method_handlers
     )
     server.add_generic_rpc_handlers((generic_handler,))
+    server.add_registered_method_handlers("spark.connect.SparkConnectService", rpc_method_handlers)
 
 
 # This class is part of an EXPERIMENTAL API.
@@ -253,6 +264,7 @@ def ExecutePlan(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -282,6 +294,7 @@ def AnalyzePlan(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -311,6 +324,7 @@ def Config(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -340,6 +354,7 @@ def AddArtifacts(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -369,6 +384,7 @@ def ArtifactStatus(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -398,6 +414,7 @@ def Interrupt(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -427,6 +444,7 @@ def ReattachExecute(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -456,6 +474,7 @@ def ReleaseExecute(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -485,6 +504,7 @@ def ReleaseSession(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
 
     @staticmethod
@@ -514,4 +534,5 @@ def FetchErrorDetails(
             wait_for_ready,
             timeout,
             metadata,
+            _registered_method=True,
         )
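
Note: the `_registered_method=True` arguments and the `server.add_registered_method_handlers(...)` call are what stubs regenerated with a grpcio-tools version matching grpcio 1.67.x emit; registering the method path with the channel and server up front enables gRPC's registered-method fast path instead of resolving the method string per call. A minimal sketch of the same generated pattern against a hypothetical Echo service (placeholder serializers; real generated stubs use the protobuf SerializeToString/FromString pair):

    # Hypothetical service, for illustration only; mirrors the generated pattern above.
    import grpc

    class EchoStub:
        def __init__(self, channel):
            self.Echo = channel.unary_unary(
                "/example.Echo/Echo",
                request_serializer=lambda req: req,      # placeholder: request is raw bytes
                response_deserializer=lambda res: res,   # placeholder: response is raw bytes
                _registered_method=True,                 # emitted by recent grpcio-tools
            )

    # Usage sketch: EchoStub(grpc.insecure_channel("localhost:50051")).Echo(b"ping")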
