Merge branch 'main' into groupby_value_counts_sort
rhshadrach authored Oct 6, 2024
2 parents ede98f7 + 05fa958 · commit d7766d7
Showing 195 changed files with 2,650 additions and 1,447 deletions.
14 changes: 12 additions & 2 deletions .circleci/config.yml
@@ -92,7 +92,13 @@ jobs:
no_output_timeout: 30m # Sometimes the tests won't generate any output, make sure the job doesn't get killed by that
command: |
pip3 install cibuildwheel==2.20.0
cibuildwheel --output-dir wheelhouse
if [[ $CIBW_BUILD == cp313t* ]]; then
# TODO: temporarily run 3.13 free threaded builds without build isolation
# since we need pre-release cython
CIBW_BUILD_FRONTEND="pip; args: --no-build-isolation" cibuildwheel --output-dir wheelhouse
else
cibuildwheel --output-dir wheelhouse
fi
environment:
CIBW_BUILD: << parameters.cibw-build >>
@@ -141,6 +147,10 @@ workflows:
cibw-build: ["cp310-manylinux_aarch64",
"cp311-manylinux_aarch64",
"cp312-manylinux_aarch64",
"cp313-manylinux_aarch64",
"cp313t-manylinux_aarch64",
"cp310-musllinux_aarch64",
"cp311-musllinux_aarch64",
"cp312-musllinux_aarch64",]
"cp312-musllinux_aarch64",
"cp313-musllinux_aarch64",
"cp313t-musllinux_aarch64"]
2 changes: 2 additions & 0 deletions .github/actions/setup-conda/action.yml
@@ -9,6 +9,8 @@ runs:
- name: Install ${{ inputs.environment-file }}
uses: mamba-org/setup-micromamba@v1
with:
# Pinning to avoid 2.0 failures
micromamba-version: '1.5.10-0'
environment-file: ${{ inputs.environment-file }}
environment-name: test
condarc-file: ci/.condarc
4 changes: 2 additions & 2 deletions .github/workflows/code-checks.yml
@@ -4,11 +4,11 @@ on:
push:
branches:
- main
- 2.2.x
- 2.3.x
pull_request:
branches:
- main
- 2.2.x
- 2.3.x

env:
ENV_FILE: environment.yml
4 changes: 2 additions & 2 deletions .github/workflows/docbuild-and-upload.yml
@@ -4,13 +4,13 @@ on:
push:
branches:
- main
- 2.2.x
- 2.3.x
tags:
- '*'
pull_request:
branches:
- main
- 2.2.x
- 2.3.x

env:
ENV_FILE: environment.yml
4 changes: 2 additions & 2 deletions .github/workflows/package-checks.yml
@@ -4,11 +4,11 @@ on:
push:
branches:
- main
- 2.2.x
- 2.3.x
pull_request:
branches:
- main
- 2.2.x
- 2.3.x
types: [ labeled, opened, synchronize, reopened ]

permissions:
6 changes: 3 additions & 3 deletions .github/workflows/unit-tests.yml
@@ -4,11 +4,11 @@ on:
push:
branches:
- main
- 2.2.x
- 2.3.x
pull_request:
branches:
- main
- 2.2.x
- 2.3.x
paths-ignore:
- "doc/**"
- "web/**"
@@ -380,7 +380,7 @@ jobs:
fetch-depth: 0

- name: Set up Python Free-threading Version
uses: deadsnakes/action@v3.1.0
uses: deadsnakes/action@v3.2.0
with:
python-version: 3.13-dev
nogil: true
6 changes: 1 addition & 5 deletions .github/workflows/wheels.yml
@@ -102,9 +102,7 @@ jobs:
python: [["cp310", "3.10"], ["cp311", "3.11"], ["cp312", "3.12"], ["cp313", "3.13"], ["cp313t", "3.13"]]
include:
# TODO: Remove this plus installing build deps in cibw_before_build.sh
# and test deps in cibw_before_test.sh after pandas can be built with a released NumPy/Cython
- python: ["cp313", "3.13"]
cibw_build_frontend: 'pip; args: --no-build-isolation'
# after pandas can be built with a released NumPy/Cython
- python: ["cp313t", "3.13"]
cibw_build_frontend: 'pip; args: --no-build-isolation'
# Build Pyodide wheels and upload them to Anaconda.org
@@ -187,11 +185,9 @@ jobs:
- name: Test Windows Wheels
if: ${{ matrix.buildplat[1] == 'win_amd64' }}
shell: pwsh
# TODO: Remove NumPy nightly install when there's a 3.13 wheel on PyPI
run: |
$TST_CMD = @"
python -m pip install hypothesis>=6.84.0 pytest>=7.3.2 pytest-xdist>=3.4.0;
${{ matrix.python[1] == '3.13' && 'python -m pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple numpy;' }}
python -m pip install `$(Get-Item pandas\wheelhouse\*.whl);
python -c `'import pandas as pd; pd.test(extra_args=[`\"--no-strict-data-files`\", `\"-m not clipboard and not single_cpu and not slow and not network and not db`\"])`';
"@
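For readers skimming the wheels.yml hunk above: after this change only the free-threaded cp313t entry keeps the pip no-build-isolation frontend, while the regular cp313 build uses the default. A minimal, illustrative sketch (not part of the workflow itself) of the resulting matrix:

```python
# Illustrative only: mirrors the wheels.yml matrix to show which CPython tags
# still override the cibuildwheel build frontend after this commit.
python_matrix = [["cp310", "3.10"], ["cp311", "3.11"], ["cp312", "3.12"],
                 ["cp313", "3.13"], ["cp313t", "3.13"]]
frontend_overrides = {"cp313t": "pip; args: --no-build-isolation"}

for tag, _version in python_matrix:
    print(tag, "->", frontend_overrides.get(tag, "default build frontend"))
```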
1 change: 0 additions & 1 deletion MANIFEST.in
@@ -65,4 +65,3 @@ graft pandas/_libs/include

# Include cibw script in sdist since it's needed for building wheels
include scripts/cibw_before_build.sh
include scripts/cibw_before_test.sh
63 changes: 0 additions & 63 deletions ci/code_checks.sh
@@ -70,14 +70,9 @@ if [[ -z "$CHECK" || "$CHECK" == "docstrings" ]]; then
--format=actions \
-i ES01 `# For now it is ok if docstrings are missing the extended summary` \
-i "pandas.Series.dt PR01" `# Accessors are implemented as classes, but we do not document the Parameters section` \
-i "pandas.NA SA01" \
-i "pandas.Period.freq GL08" \
-i "pandas.Period.ordinal GL08" \
-i "pandas.PeriodDtype.freq SA01" \
-i "pandas.RangeIndex.from_range PR01,SA01" \
-i "pandas.RangeIndex.start SA01" \
-i "pandas.RangeIndex.step SA01" \
-i "pandas.RangeIndex.stop SA01" \
-i "pandas.Series.cat.add_categories PR01,PR02" \
-i "pandas.Series.cat.as_ordered PR01" \
-i "pandas.Series.cat.as_unordered PR01" \
@@ -92,108 +87,60 @@ if [[ -z "$CHECK" || "$CHECK" == "docstrings" ]]; then
-i "pandas.Series.dt.floor PR01,PR02" \
-i "pandas.Series.dt.freq GL08" \
-i "pandas.Series.dt.month_name PR01,PR02" \
-i "pandas.Series.dt.nanoseconds SA01" \
-i "pandas.Series.dt.normalize PR01" \
-i "pandas.Series.dt.round PR01,PR02" \
-i "pandas.Series.dt.seconds SA01" \
-i "pandas.Series.dt.strftime PR01,PR02" \
-i "pandas.Series.dt.to_period PR01,PR02" \
-i "pandas.Series.dt.total_seconds PR01" \
-i "pandas.Series.dt.tz_convert PR01,PR02" \
-i "pandas.Series.dt.tz_localize PR01,PR02" \
-i "pandas.Series.dt.unit GL08" \
-i "pandas.Series.pad PR01,SA01" \
-i "pandas.Series.sparse.fill_value SA01" \
-i "pandas.Series.sparse.from_coo PR07,SA01" \
-i "pandas.Series.sparse.npoints SA01" \
-i "pandas.Series.sparse.sp_values SA01" \
-i "pandas.Timedelta.components SA01" \
-i "pandas.Timedelta.max PR02" \
-i "pandas.Timedelta.min PR02" \
-i "pandas.Timedelta.resolution PR02" \
-i "pandas.Timedelta.to_timedelta64 SA01" \
-i "pandas.Timedelta.total_seconds SA01" \
-i "pandas.TimedeltaIndex.nanoseconds SA01" \
-i "pandas.TimedeltaIndex.seconds SA01" \
-i "pandas.TimedeltaIndex.to_pytimedelta RT03,SA01" \
-i "pandas.Timestamp.max PR02" \
-i "pandas.Timestamp.min PR02" \
-i "pandas.Timestamp.nanosecond GL08" \
-i "pandas.Timestamp.resolution PR02" \
-i "pandas.Timestamp.tzinfo GL08" \
-i "pandas.Timestamp.year GL08" \
-i "pandas.api.extensions.ExtensionArray.interpolate PR01,SA01" \
-i "pandas.api.types.is_bool PR01,SA01" \
-i "pandas.api.types.is_categorical_dtype SA01" \
-i "pandas.api.types.is_complex PR01,SA01" \
-i "pandas.api.types.is_complex_dtype SA01" \
-i "pandas.api.types.is_datetime64_dtype SA01" \
-i "pandas.api.types.is_datetime64_ns_dtype SA01" \
-i "pandas.api.types.is_datetime64tz_dtype SA01" \
-i "pandas.api.types.is_dict_like PR07,SA01" \
-i "pandas.api.types.is_extension_array_dtype SA01" \
-i "pandas.api.types.is_file_like PR07,SA01" \
-i "pandas.api.types.is_float PR01,SA01" \
-i "pandas.api.types.is_float_dtype SA01" \
-i "pandas.api.types.is_hashable PR01,RT03,SA01" \
-i "pandas.api.types.is_int64_dtype SA01" \
-i "pandas.api.types.is_integer PR01,SA01" \
-i "pandas.api.types.is_integer_dtype SA01" \
-i "pandas.api.types.is_interval_dtype SA01" \
-i "pandas.api.types.is_iterator PR07,SA01" \
-i "pandas.api.types.is_list_like SA01" \
-i "pandas.api.types.is_named_tuple PR07,SA01" \
-i "pandas.api.types.is_object_dtype SA01" \
-i "pandas.api.types.is_re PR07,SA01" \
-i "pandas.api.types.is_re_compilable PR07,SA01" \
-i "pandas.api.types.pandas_dtype PR07,RT03,SA01" \
-i "pandas.arrays.ArrowExtensionArray PR07,SA01" \
-i "pandas.arrays.BooleanArray SA01" \
-i "pandas.arrays.DatetimeArray SA01" \
-i "pandas.arrays.FloatingArray SA01" \
-i "pandas.arrays.IntegerArray SA01" \
-i "pandas.arrays.IntervalArray.left SA01" \
-i "pandas.arrays.IntervalArray.length SA01" \
-i "pandas.arrays.IntervalArray.mid SA01" \
-i "pandas.arrays.IntervalArray.right SA01" \
-i "pandas.arrays.NumpyExtensionArray SA01" \
-i "pandas.arrays.SparseArray PR07,SA01" \
-i "pandas.arrays.TimedeltaArray PR07,SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.__iter__ RT03,SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.agg RT03" \
-i "pandas.core.groupby.DataFrameGroupBy.aggregate RT03" \
-i "pandas.core.groupby.DataFrameGroupBy.boxplot PR07,RT03,SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.filter SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.get_group RT03,SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.groups SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.hist RT03" \
-i "pandas.core.groupby.DataFrameGroupBy.indices SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.max SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.min SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.nth PR02" \
-i "pandas.core.groupby.DataFrameGroupBy.nunique SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.ohlc SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.plot PR02" \
-i "pandas.core.groupby.DataFrameGroupBy.sem SA01" \
-i "pandas.core.groupby.DataFrameGroupBy.sum SA01" \
-i "pandas.core.groupby.SeriesGroupBy.__iter__ RT03,SA01" \
-i "pandas.core.groupby.SeriesGroupBy.agg RT03" \
-i "pandas.core.groupby.SeriesGroupBy.aggregate RT03" \
-i "pandas.core.groupby.SeriesGroupBy.filter PR01,SA01" \
-i "pandas.core.groupby.SeriesGroupBy.get_group RT03,SA01" \
-i "pandas.core.groupby.SeriesGroupBy.groups SA01" \
-i "pandas.core.groupby.SeriesGroupBy.indices SA01" \
-i "pandas.core.groupby.SeriesGroupBy.is_monotonic_decreasing SA01" \
-i "pandas.core.groupby.SeriesGroupBy.is_monotonic_increasing SA01" \
-i "pandas.core.groupby.SeriesGroupBy.max SA01" \
-i "pandas.core.groupby.SeriesGroupBy.min SA01" \
-i "pandas.core.groupby.SeriesGroupBy.nth PR02" \
-i "pandas.core.groupby.SeriesGroupBy.ohlc SA01" \
-i "pandas.core.groupby.SeriesGroupBy.plot PR02" \
-i "pandas.core.groupby.SeriesGroupBy.sem SA01" \
-i "pandas.core.groupby.SeriesGroupBy.sum SA01" \
-i "pandas.core.resample.Resampler.__iter__ RT03,SA01" \
-i "pandas.core.resample.Resampler.ffill RT03" \
-i "pandas.core.resample.Resampler.get_group RT03,SA01" \
-i "pandas.core.resample.Resampler.groups SA01" \
-i "pandas.core.resample.Resampler.indices SA01" \
@@ -208,24 +155,19 @@ if [[ -z "$CHECK" || "$CHECK" == "docstrings" ]]; then
-i "pandas.core.resample.Resampler.sum SA01" \
-i "pandas.core.resample.Resampler.transform PR01,RT03,SA01" \
-i "pandas.core.resample.Resampler.var SA01" \
-i "pandas.date_range RT03" \
-i "pandas.errors.AttributeConflictWarning SA01" \
-i "pandas.errors.CSSWarning SA01" \
-i "pandas.errors.CategoricalConversionWarning SA01" \
-i "pandas.errors.ChainedAssignmentError SA01" \
-i "pandas.errors.ClosedFileError SA01" \
-i "pandas.errors.DataError SA01" \
-i "pandas.errors.DuplicateLabelError SA01" \
-i "pandas.errors.EmptyDataError SA01" \
-i "pandas.errors.IntCastingNaNError SA01" \
-i "pandas.errors.InvalidIndexError SA01" \
-i "pandas.errors.InvalidVersion SA01" \
-i "pandas.errors.MergeError SA01" \
-i "pandas.errors.NullFrequencyError SA01" \
-i "pandas.errors.NumExprClobberingError SA01" \
-i "pandas.errors.NumbaUtilError SA01" \
-i "pandas.errors.OptionError SA01" \
-i "pandas.errors.OutOfBoundsDatetime SA01" \
-i "pandas.errors.OutOfBoundsTimedelta SA01" \
-i "pandas.errors.PerformanceWarning SA01" \
-i "pandas.errors.PossibleDataLossError SA01" \
@@ -237,17 +179,14 @@ if [[ -z "$CHECK" || "$CHECK" == "docstrings" ]]; then
-i "pandas.errors.ValueLabelTypeMismatch SA01" \
-i "pandas.infer_freq SA01" \
-i "pandas.io.json.build_table_schema PR07,RT03,SA01" \
-i "pandas.io.stata.StataReader.data_label SA01" \
-i "pandas.io.stata.StataReader.value_labels RT03,SA01" \
-i "pandas.io.stata.StataReader.variable_labels RT03,SA01" \
-i "pandas.io.stata.StataWriter.write_file SA01" \
-i "pandas.json_normalize RT03,SA01" \
-i "pandas.period_range RT03,SA01" \
-i "pandas.plotting.andrews_curves RT03,SA01" \
-i "pandas.plotting.lag_plot RT03,SA01" \
-i "pandas.plotting.scatter_matrix PR07,SA01" \
-i "pandas.set_eng_float_format RT03,SA01" \
-i "pandas.testing.assert_extension_array_equal SA01" \
-i "pandas.tseries.offsets.BDay PR02,SA01" \
-i "pandas.tseries.offsets.BQuarterBegin.is_on_offset GL08" \
-i "pandas.tseries.offsets.BQuarterBegin.n GL08" \
@@ -399,7 +338,6 @@ if [[ -z "$CHECK" || "$CHECK" == "docstrings" ]]; then
-i "pandas.tseries.offsets.SemiMonthBegin.n GL08" \
-i "pandas.tseries.offsets.SemiMonthBegin.normalize GL08" \
-i "pandas.tseries.offsets.SemiMonthBegin.rule_code GL08" \
-i "pandas.tseries.offsets.SemiMonthEnd SA01" \
-i "pandas.tseries.offsets.SemiMonthEnd.day_of_month GL08" \
-i "pandas.tseries.offsets.SemiMonthEnd.is_on_offset GL08" \
-i "pandas.tseries.offsets.SemiMonthEnd.n GL08" \
@@ -413,7 +351,6 @@ if [[ -z "$CHECK" || "$CHECK" == "docstrings" ]]; then
-i "pandas.tseries.offsets.Week.n GL08" \
-i "pandas.tseries.offsets.Week.normalize GL08" \
-i "pandas.tseries.offsets.Week.weekday GL08" \
-i "pandas.tseries.offsets.WeekOfMonth SA01" \
-i "pandas.tseries.offsets.WeekOfMonth.is_on_offset GL08" \
-i "pandas.tseries.offsets.WeekOfMonth.n GL08" \
-i "pandas.tseries.offsets.WeekOfMonth.normalize GL08" \
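The lines dropped from ci/code_checks.sh above are numpydoc ignore entries for docstrings that presumably now validate cleanly. A hedged spot-check for one of the removed entries, assuming numpydoc's validate API (this is not one of the pandas scripts shown in the diff):

```python
# Assumption: numpydoc exposes validate(), which takes a dotted object name
# and returns a dict whose "errors" entry is a list of (code, message) pairs.
from numpydoc.validate import validate

result = validate("pandas.NA")
codes = [code for code, _msg in result["errors"]]
# If the removal above is correct, SA01 ("See Also" section missing) should
# no longer be reported for pandas.NA.
print("SA01" in codes)
```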
4 changes: 3 additions & 1 deletion doc/source/conf.py
@@ -254,7 +254,9 @@
"json_url": "https://pandas.pydata.org/versions.json",
"version_match": switcher_version,
},
"show_version_warning_banner": True,
# This shows a warning for patch releases since the
# patch version doesn't compare as equal (e.g. 2.2.1 != 2.2.0 but it should be)
"show_version_warning_banner": False,
"icon_links": [
{
"name": "Mastodon",
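The conf.py change above turns off the theme's version warning banner because patch releases never compare equal to the switcher entry. A minimal sketch of the mismatch being worked around, not the theme's actual logic:

```python
# Hypothetical illustration: the docs are built for a patch release, but the
# switcher's "version_match" entry tracks a different patch number, so a plain
# equality test flags the current docs as a different (outdated) version.
built_version = "2.2.1"      # version of the docs being served
version_match = "2.2.0"      # entry expected to match in versions.json
banner_would_show = built_version != version_match
print(banner_would_show)     # True, even though both are the 2.2.x docs
```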
6 changes: 3 additions & 3 deletions doc/source/development/contributing.rst
@@ -305,15 +305,15 @@ It is important to periodically update your local ``main`` branch with updates f
branch and update your development environment to reflect any changes to the various packages that
are used during development.

If using :ref:`mamba <contributing.mamba>`, run:
If using :ref:`conda <contributing.conda>`, run:

.. code-block:: shell
git checkout main
git fetch upstream
git merge upstream/main
mamba activate pandas-dev
mamba env update -f environment.yml --prune
conda activate pandas-dev
conda env update -f environment.yml --prune
If using :ref:`pip <contributing.pip>` , do:

2 changes: 1 addition & 1 deletion doc/source/development/contributing_codebase.rst
@@ -244,7 +244,7 @@ in your python environment.

.. warning::

* Please be aware that the above commands will use the current python environment. If your python packages are older/newer than those installed by the pandas CI, the above commands might fail. This is often the case when the ``mypy`` or ``numpy`` versions do not match. Please see :ref:`how to setup the python environment <contributing.mamba>` or select a `recently succeeded workflow <https://github.com/pandas-dev/pandas/actions/workflows/code-checks.yml?query=branch%3Amain+is%3Asuccess>`_, select the "Docstring validation, typing, and other manual pre-commit hooks" job, then click on "Set up Conda" and "Environment info" to see which versions the pandas CI installs.
* Please be aware that the above commands will use the current python environment. If your python packages are older/newer than those installed by the pandas CI, the above commands might fail. This is often the case when the ``mypy`` or ``numpy`` versions do not match. Please see :ref:`how to setup the python environment <contributing.conda>` or select a `recently succeeded workflow <https://github.com/pandas-dev/pandas/actions/workflows/code-checks.yml?query=branch%3Amain+is%3Asuccess>`_, select the "Docstring validation, typing, and other manual pre-commit hooks" job, then click on "Set up Conda" and "Environment info" to see which versions the pandas CI installs.

.. _contributing.ci:

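The warning amended above comes down to comparing locally installed tool versions against the ones the pandas CI installs. A small sketch of how one might list the local versions before checking them against the "Environment info" step of a passing code-checks run; the package selection is just the pair called out in the warning:

```python
# Prints the locally installed versions of the tools that most often drift
# from the pandas CI environment (per the warning above).
from importlib.metadata import PackageNotFoundError, version

for pkg in ("mypy", "numpy"):
    try:
        print(pkg, version(pkg))
    except PackageNotFoundError:
        print(pkg, "not installed")
```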
[Diffs for the remaining changed files in this commit are not shown here.]