diff --git a/.github/workflows/test-package.yml b/.github/workflows/test-package.yml
index c7899383..7b3e6ba6 100644
--- a/.github/workflows/test-package.yml
+++ b/.github/workflows/test-package.yml
@@ -62,7 +62,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: [3.9, '3.10', '3.11']
+        python-version: [3.9, '3.10', '3.11', '3.12']
     env:
       PYTHON_VERSION: ${{ matrix.python-version }}
 
diff --git a/CODEOWNERS b/CODEOWNERS
index 53ed855d..a61d6a58 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -1 +1 @@
-* @fdosani @ak-gupta @jdawang @gladysteh99 @NikhilJArora
+* @fdosani @ak-gupta @jdawang @gladysteh99
diff --git a/README.md b/README.md
index b9abfee5..23128f44 100644
--- a/README.md
+++ b/README.md
@@ -33,7 +33,6 @@ If you would like to use Spark or any other backends please make sure you instal
 pip install datacompy[spark]
 pip install datacompy[dask]
 pip install datacompy[duckdb]
-pip install datacompy[polars]
 pip install datacompy[ray]
 ```
 
@@ -47,7 +46,7 @@ The original ``SparkCompare`` implementation differs from all the other native i
 If you wish to use the old SparkCompare moving forward you can
 
 ```python
-import datacompy.legacy.LegacySparkCompare
+from datacompy.legacy import LegacySparkCompare
 ```
 
 #### Supported versions and dependncies
@@ -79,7 +78,9 @@ With version ``0.12.0``:
 
 
 > [!NOTE]
-> At the current time Python `3.12` is not supported by Spark and also Ray within Fugue.
+> At the current time Python `3.12` is not supported by Spark or by Ray within Fugue.
+> If you are using Python `3.12` or above, please note that not all functionality will be supported.
+> Pandas and Polars support should work fine and is tested.
 
 
 ## Supported backends
diff --git a/datacompy/__init__.py b/datacompy/__init__.py
index b43027ae..6af6e81c 100644
--- a/datacompy/__init__.py
+++ b/datacompy/__init__.py
@@ -13,7 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "0.12.0"
+__version__ = "0.12.1"
+
+import platform
+from warnings import warn
 
 from datacompy.core import *
 from datacompy.fugue import (
@@ -27,3 +30,14 @@
 )
 from datacompy.polars import PolarsCompare
 from datacompy.spark import SparkCompare
+
+major = int(platform.python_version_tuple()[0])
+minor = int(platform.python_version_tuple()[1])
+
+if major == 3 and minor >= 12:
+    warn(
+        "Python 3.12 and above is not currently supported by Spark and Ray. "
+        "Please note that some functionality will not work.",
+        UserWarning,
+        stacklevel=2,
+    )
diff --git a/docs/source/install.rst b/docs/source/install.rst
index e6038f73..5fdf1bb7 100644
--- a/docs/source/install.rst
+++ b/docs/source/install.rst
@@ -2,9 +2,10 @@
 Installation
 ============
 
-.. note::
+.. important::
 
-    Moving forward ``datacompy`` will not support Python 2. Please make sure you are using Python 3.8+
+    If you are using Python 3.12 and above, please note that not all functionality will be supported.
+    Pandas and Polars support should work fine and is tested.
 
 
 PyPI (basic)
diff --git a/docs/source/spark_usage.rst b/docs/source/spark_usage.rst
index a532316e..5f35724a 100644
--- a/docs/source/spark_usage.rst
+++ b/docs/source/spark_usage.rst
@@ -13,7 +13,7 @@ Spark (Pandas on Spark) Usage
 
 .. code-block:: python
 
-    import datacompy.legacy.LegacySparkCompare
+    from datacompy.legacy import LegacySparkCompare
 
 
 
diff --git a/pyproject.toml b/pyproject.toml
index 3bde2d70..2636643b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -11,7 +11,13 @@ maintainers = [
     { name="Faisal Dosani", email="faisal.dosani@capitalone.com" }
 ]
 license = {text = "Apache Software License"}
-dependencies = ["pandas<=2.2.2,>=0.25.0", "numpy<=1.26.4,>=1.22.0", "ordered-set<=4.1.0,>=4.0.2", "fugue<=0.8.7,>=0.8.7"]
+dependencies = [
+    "pandas<=2.2.2,>=0.25.0",
+    "numpy<=1.26.4,>=1.22.0",
+    "ordered-set<=4.1.0,>=4.0.2",
+    "fugue<=0.8.7,>=0.8.7",
+    "polars<=0.20.27,>=0.20.4",
+]
 requires-python = ">=3.9.0"
 classifiers = [
     "Intended Audience :: Developers",
@@ -23,6 +29,7 @@ classifiers = [
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
 ]
 dynamic = ["version"]
 
@@ -54,7 +61,6 @@ python-tag = "py3"
 
 [project.optional-dependencies]
 duckdb = ["fugue[duckdb]"]
-polars = ["polars"]
 spark = ["pyspark>=3.1.1; python_version < \"3.11\"", "pyspark>=3.4; python_version >= \"3.11\""]
 dask = ["fugue[dask]"]
 ray = ["fugue[ray]"]
@@ -65,7 +71,7 @@ tests-spark = ["pytest", "pytest-cov", "pytest-spark", "spark"]
 qa = ["pre-commit", "black", "isort", "mypy", "pandas-stubs"]
 build = ["build", "twine", "wheel"]
 edgetest = ["edgetest", "edgetest-conda"]
-dev = ["datacompy[duckdb]", "datacompy[polars]", "datacompy[spark]", "datacompy[docs]", "datacompy[tests]", "datacompy[tests-spark]", "datacompy[qa]", "datacompy[build]"]
+dev = ["datacompy[duckdb]", "datacompy[spark]", "datacompy[docs]", "datacompy[tests]", "datacompy[tests-spark]", "datacompy[qa]", "datacompy[build]"]
 
 [tool.isort]
 multi_line_output = 3
@@ -96,4 +102,5 @@ upgrade = [
     "numpy",
     "ordered-set",
     "fugue",
+    "polars",
 ]
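
A minimal smoke-test sketch, not part of the patch, illustrating the two user-facing changes above: the import-time `UserWarning` added in `datacompy/__init__.py` and the new `datacompy.legacy` import path. It assumes datacompy 0.12.1 (this patch) is installed; the legacy import additionally requires pyspark, so it is left commented out.

```python
import sys
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    import datacompy  # on Python >= 3.12 this should emit the new UserWarning

print(datacompy.__version__)  # expected: "0.12.1"

if sys.version_info >= (3, 12):
    # The import-time warning added in datacompy/__init__.py should have fired.
    assert any("supported by Spark and Ray" in str(w.message) for w in caught)

# The legacy Spark comparator now lives under datacompy.legacy; importing it
# requires pyspark (the "spark" extra), so the line is shown commented out:
# from datacompy.legacy import LegacySparkCompare
```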