Skip to content

Commit

Permalink
[SPARK-44370][CONNECT] Migrate Buf remote generation alpha to remote …
Browse files Browse the repository at this point in the history
…plugins

### What changes were proposed in this pull request?
Buf no longer supports remote generation alpha. Please refer to https://buf.build/docs/migration-guides/migrate-remote-generation-alpha/ . We should migrate from Buf remote generation alpha to remote plugins by following the guide.

The CI is also broken for this reason.

### Why are the changes needed?
Migrate Buf remote generation alpha to remote plugins because remote generation alpha features have been sunset.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
Existing tests.

Closes #41933 from Hisoka-X/SPARK-44370_buf_migrate.

Authored-by: Jia Fan <fanjiaeminem@qq.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
  • Loading branch information
Hisoka-X authored and HyukjinKwon committed Jul 12, 2023
1 parent 6c02bd0 commit c5c4ec7
Show file tree
Hide file tree
Showing 9 changed files with 29 additions and 2,930 deletions.
10 changes: 5 additions & 5 deletions connector/connect/common/src/main/buf.gen.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,18 +16,18 @@
#
version: v1
plugins:
- remote: buf.build/protocolbuffers/plugins/cpp:v3.20.0-1
- plugin: buf.build/protocolbuffers/cpp:v21.7
out: gen/proto/cpp
- remote: buf.build/protocolbuffers/plugins/csharp:v3.20.0-1
- plugin: buf.build/protocolbuffers/csharp:v21.7
out: gen/proto/csharp
- remote: buf.build/protocolbuffers/plugins/java:v3.20.0-1
- plugin: buf.build/protocolbuffers/java:v21.7
out: gen/proto/java
- plugin: buf.build/grpc/ruby:v1.56.0
out: gen/proto/ruby
- remote: buf.build/protocolbuffers/plugins/ruby:v21.2.0-1
- plugin: buf.build/protocolbuffers/ruby:v21.7
out: gen/proto/ruby
# Building the Python build and building the mypy interfaces.
- remote: buf.build/protocolbuffers/plugins/python:v3.19.3-1
- plugin: buf.build/protocolbuffers/python:v21.7
out: gen/proto/python
- plugin: buf.build/grpc/python:v1.56.0
out: gen/proto/python
Expand Down
748 changes: 3 additions & 745 deletions python/pyspark/sql/connect/proto/base_pb2.py

Large diffs are not rendered by default.

352 changes: 3 additions & 349 deletions python/pyspark/sql/connect/proto/catalog_pb2.py

Large diffs are not rendered by default.

432 changes: 3 additions & 429 deletions python/pyspark/sql/connect/proto/commands_pb2.py

Large diffs are not rendered by default.

30 changes: 3 additions & 27 deletions python/pyspark/sql/connect/proto/common_pb2.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,9 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spark/connect/common.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)
Expand All @@ -33,31 +32,8 @@
b'\n\x1aspark/connect/common.proto\x12\rspark.connect"\xb0\x01\n\x0cStorageLevel\x12\x19\n\x08use_disk\x18\x01 \x01(\x08R\x07useDisk\x12\x1d\n\nuse_memory\x18\x02 \x01(\x08R\tuseMemory\x12 \n\x0cuse_off_heap\x18\x03 \x01(\x08R\nuseOffHeap\x12"\n\x0c\x64\x65serialized\x18\x04 \x01(\x08R\x0c\x64\x65serialized\x12 \n\x0breplication\x18\x05 \x01(\x05R\x0breplication"G\n\x13ResourceInformation\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n\taddresses\x18\x02 \x03(\tR\taddressesB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
)


_STORAGELEVEL = DESCRIPTOR.message_types_by_name["StorageLevel"]
_RESOURCEINFORMATION = DESCRIPTOR.message_types_by_name["ResourceInformation"]
StorageLevel = _reflection.GeneratedProtocolMessageType(
"StorageLevel",
(_message.Message,),
{
"DESCRIPTOR": _STORAGELEVEL,
"__module__": "spark.connect.common_pb2"
# @@protoc_insertion_point(class_scope:spark.connect.StorageLevel)
},
)
_sym_db.RegisterMessage(StorageLevel)

ResourceInformation = _reflection.GeneratedProtocolMessageType(
"ResourceInformation",
(_message.Message,),
{
"DESCRIPTOR": _RESOURCEINFORMATION,
"__module__": "spark.connect.common_pb2"
# @@protoc_insertion_point(class_scope:spark.connect.ResourceInformation)
},
)
_sym_db.RegisterMessage(ResourceInformation)

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.common_pb2", globals())
if _descriptor._USE_C_DESCRIPTORS == False:

DESCRIPTOR._options = None
Expand Down
42 changes: 3 additions & 39 deletions python/pyspark/sql/connect/proto/example_plugins_pb2.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,9 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spark/connect/example_plugins.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)
Expand All @@ -37,43 +36,8 @@
b'\n#spark/connect/example_plugins.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x1fspark/connect/expressions.proto"i\n\x15\x45xamplePluginRelation\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"m\n\x17\x45xamplePluginExpression\x12/\n\x05\x63hild\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x05\x63hild\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"9\n\x14\x45xamplePluginCommand\x12!\n\x0c\x63ustom_field\x18\x01 \x01(\tR\x0b\x63ustomFieldB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
)


_EXAMPLEPLUGINRELATION = DESCRIPTOR.message_types_by_name["ExamplePluginRelation"]
_EXAMPLEPLUGINEXPRESSION = DESCRIPTOR.message_types_by_name["ExamplePluginExpression"]
_EXAMPLEPLUGINCOMMAND = DESCRIPTOR.message_types_by_name["ExamplePluginCommand"]
ExamplePluginRelation = _reflection.GeneratedProtocolMessageType(
"ExamplePluginRelation",
(_message.Message,),
{
"DESCRIPTOR": _EXAMPLEPLUGINRELATION,
"__module__": "spark.connect.example_plugins_pb2"
# @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginRelation)
},
)
_sym_db.RegisterMessage(ExamplePluginRelation)

ExamplePluginExpression = _reflection.GeneratedProtocolMessageType(
"ExamplePluginExpression",
(_message.Message,),
{
"DESCRIPTOR": _EXAMPLEPLUGINEXPRESSION,
"__module__": "spark.connect.example_plugins_pb2"
# @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginExpression)
},
)
_sym_db.RegisterMessage(ExamplePluginExpression)

ExamplePluginCommand = _reflection.GeneratedProtocolMessageType(
"ExamplePluginCommand",
(_message.Message,),
{
"DESCRIPTOR": _EXAMPLEPLUGINCOMMAND,
"__module__": "spark.connect.example_plugins_pb2"
# @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginCommand)
},
)
_sym_db.RegisterMessage(ExamplePluginCommand)

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.example_plugins_pb2", globals())
if _descriptor._USE_C_DESCRIPTORS == False:

DESCRIPTOR._options = None
Expand Down
Loading

0 comments on commit c5c4ec7

Please sign in to comment.