[DOP-13853] Update MongoDB package to 10.2.3 #263

Merged 1 commit on Apr 26, 2024
2 changes: 1 addition & 1 deletion docs/changelog/next_release/255.feature.rst
@@ -1 +1 @@
-:class:`MongoDB` connection now uses MongoDB Spark connector ``10.2.2``, upgraded from ``10.1.1``, and supports passing custom versions: ``MongoDB.get_packages(scala_version=..., package_version=...)``.
+:class:`MongoDB` connection now uses MongoDB Spark connector ``10.2.3``, upgraded from ``10.1.1``, and supports passing custom versions: ``MongoDB.get_packages(scala_version=..., package_version=...)``.
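
Not part of the diff, but for context: a minimal sketch of how the `get_packages` output mentioned in this changelog entry is typically handed to Spark. The app name and Scala version here are illustrative assumptions, not taken from this PR.

```python
from pyspark.sql import SparkSession

from onetl.connection import MongoDB

# get_packages returns a list of Maven coordinates; omitting
# package_version now falls back to the new default, 10.2.3.
packages = MongoDB.get_packages(scala_version="2.12", package_version="10.2.3")

spark = (
    SparkSession.builder
    .appName("mongodb-example")  # illustrative name
    .config("spark.jars.packages", ",".join(packages))
    .getOrCreate()
)
```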
4 changes: 2 additions & 2 deletions docs/connection/db_connection/mongodb/types.rst
@@ -73,8 +73,8 @@ References

Here you can find source code with type conversions:

-* `MongoDB -> Spark <https://github.com/mongodb/mongo-spark/blob/r10.1.1/src/main/java/com/mongodb/spark/sql/connector/schema/InferSchema.java#L121-L170>`_
-* `Spark -> MongoDB <https://github.com/mongodb/mongo-spark/blob/r10.1.1/src/main/java/com/mongodb/spark/sql/connector/schema/RowToBsonDocumentConverter.java#L117-L200>`_
+* `MongoDB -> Spark <https://github.com/mongodb/mongo-spark/blob/r10.2.3/src/main/java/com/mongodb/spark/sql/connector/schema/InferSchema.java#L130-L176>`_
+* `Spark -> MongoDB <https://github.com/mongodb/mongo-spark/blob/r10.2.3/src/main/java/com/mongodb/spark/sql/connector/schema/RowToBsonDocumentConverter.java#L156-L252>`_

Supported types
---------------
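
As context for the updated InferSchema reference above (not part of this PR): the type mapping it documents can be observed by loading a collection through the connector's data source and printing the inferred schema. The URI, database, and collection values are placeholders, and `spark` is assumed to be a session built with the connector on the classpath, e.g. as in the sketch above.

```python
# The v10.x connector registers the "mongodb" data source format.
df = (
    spark.read.format("mongodb")
    .option("connection.uri", "mongodb://localhost:27017")  # placeholder URI
    .option("database", "mydb")       # placeholder database
    .option("collection", "mycoll")   # placeholder collection
    .load()
)
df.printSchema()  # shows the MongoDB -> Spark types the connector inferred
```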
16 changes: 8 additions & 8 deletions onetl/connection/db_connection/mongodb/connection.py
@@ -50,8 +50,8 @@ class Config:
class MongoDB(DBConnection):
"""MongoDB connection. |support_hooks|

-    Based on package ``org.mongodb.spark:mongo-spark-connector:10.1.1``
-    (`MongoDB connector for Spark <https://www.mongodb.com/docs/spark-connector/v10.1/>`_)
+    Based on package `org.mongodb.spark:mongo-spark-connector:10.2.3 <https://mvnrepository.com/artifact/org.mongodb.spark/mongo-spark-connector_2.12/10.2.3>`_
+    (`MongoDB connector for Spark <https://www.mongodb.com/docs/spark-connector/v10.2/>`_)

.. warning::

@@ -149,7 +149,7 @@ def get_packages(
Spark version in format ``major.minor``. Used only if ``scala_version=None``.

package_version : str, optional
-        Specifies the version of the MongoDB Spark connector to use. Defaults to ``10.2.2``.
+        Specifies the version of the MongoDB Spark connector to use. Defaults to ``10.2.3``.

Examples
--------
@@ -160,10 +160,10 @@
MongoDB.get_packages(scala_version="2.12")

# specify custom connector version
-        MongoDB.get_packages(scala_version="2.12", package_version="10.2.2")
+        MongoDB.get_packages(scala_version="2.12", package_version="10.2.3")
"""

-        default_package_version = "10.2.2"
+        default_package_version = "10.2.3"

if scala_version:
scala_ver = Version(scala_version).min_digits(2)
@@ -190,7 +190,7 @@ def package_spark_3_2(cls) -> str:
"use `MongoDB.get_packages(spark_version='3.2')` instead"
)
warnings.warn(msg, UserWarning, stacklevel=3)
return "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"
return "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"

@classproperty
def package_spark_3_3(cls) -> str:
@@ -200,7 +200,7 @@ def package_spark_3_3(cls) -> str:
"use `MongoDB.get_packages(spark_version='3.3')` instead"
)
warnings.warn(msg, UserWarning, stacklevel=3)
return "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"
return "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"

@classproperty
def package_spark_3_4(cls) -> str:
@@ -210,7 +210,7 @@ def package_spark_3_4(cls) -> str:
"use `MongoDB.get_packages(spark_version='3.4')` instead"
)
warnings.warn(msg, UserWarning, stacklevel=3)
return "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"
return "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"

@slot
def pipeline(
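
A rough sketch, not the library's actual implementation: the coordinate string that `get_packages` and the deprecated classproperties above resolve to, consistent with these hunks and the test expectations below. The helper name is hypothetical.

```python
def mongo_spark_package(scala_version: str = "2.12", package_version: str = "10.2.3") -> str:
    """Assemble a Maven coordinate for the MongoDB Spark connector."""
    # Only major.minor of the Scala version appears in the artifact name,
    # matching the Version(...).min_digits(2) normalization in the diff.
    major_minor = ".".join(scala_version.split(".")[:2])
    return f"org.mongodb.spark:mongo-spark-connector_{major_minor}:{package_version}"

# Mirrors the ("3.2.4", "2.12.1", "10.2.3", ...) test case below.
assert mongo_spark_package("2.12.1") == "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"
```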
18 changes: 9 additions & 9 deletions tests/tests_unit/tests_db_connection_unit/test_mongodb_unit.py
@@ -12,9 +12,9 @@
def test_mongodb_package():
warning_msg = re.escape("will be removed in 1.0.0, use `MongoDB.get_packages(spark_version=")
with pytest.warns(UserWarning, match=warning_msg):
-        assert MongoDB.package_spark_3_2 == "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"
-        assert MongoDB.package_spark_3_3 == "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"
-        assert MongoDB.package_spark_3_4 == "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"
+        assert MongoDB.package_spark_3_2 == "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"
+        assert MongoDB.package_spark_3_3 == "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"
+        assert MongoDB.package_spark_3_4 == "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"


def test_mongodb_get_packages_no_input():
@@ -50,16 +50,16 @@ def test_mongodb_get_packages_scala_version_not_supported(scala_version):
@pytest.mark.parametrize(
"spark_version, scala_version, package_version, package",
[
(None, "2.12", "10.2.2", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"),
(None, "2.13", "10.2.2", "org.mongodb.spark:mongo-spark-connector_2.13:10.2.2"),
("3.2", None, "10.2.2", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"),
("3.3", None, "10.2.2", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"),
("3.4", None, "10.2.2", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"),
(None, "2.12", "10.2.3", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"),
(None, "2.13", "10.2.3", "org.mongodb.spark:mongo-spark-connector_2.13:10.2.3"),
("3.2", None, "10.2.3", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"),
("3.3", None, "10.2.3", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"),
("3.4", None, "10.2.3", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"),
("3.2", "2.12", "10.1.1", "org.mongodb.spark:mongo-spark-connector_2.12:10.1.1"),
("3.4", "2.13", "10.1.1", "org.mongodb.spark:mongo-spark-connector_2.13:10.1.1"),
("3.2", "2.12", "10.2.1", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.1"),
("3.2", "2.12", "10.2.0", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.0"),
("3.2.4", "2.12.1", "10.2.2", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.2"),
("3.2.4", "2.12.1", "10.2.3", "org.mongodb.spark:mongo-spark-connector_2.12:10.2.3"),
],
)
def test_mongodb_get_packages(spark_version, scala_version, package_version, package):
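
For completeness, a hedged sketch (not from the diff) of the behavior the first test above asserts: the deprecated classproperties still resolve to the 10.2.3 coordinate but emit a `UserWarning` steering callers toward `get_packages`.

```python
import warnings

from onetl.connection import MongoDB

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Deprecated path; prefer MongoDB.get_packages(spark_version="3.4").
    pkg = MongoDB.package_spark_3_4

print(pkg)                 # org.mongodb.spark:mongo-spark-connector_2.12:10.2.3
print(caught[0].category)  # <class 'UserWarning'>
```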