
Commit fc1e0dd
Merge branch 'main' into cell-method-parse
trexfeathers authored Oct 9, 2024
2 parents 41d4c79 + cc60a60 commit fc1e0dd
Showing 68 changed files with 3,913 additions and 1,855 deletions.
13 changes: 9 additions & 4 deletions .github/dependabot.yml
@@ -6,10 +6,15 @@ version: 2
 updates:
 
   - package-ecosystem: "github-actions"
-    directory: "/"
+    directories:
+      - "/"
+      - "/.github/workflows/composite/*"
     schedule:
-      # Check for updates to GitHub Actions every weekday
-      interval: "daily"
+      # Check later in the week - the upstream dependabot check in `workflows` runs deliberately early in the week.
+      # Therefore allowing time for the `workflows` update to be merged-and-released first.
+      interval: "weekly"
+      day: "thursday"
+      time: "01:00"
+      timezone: "Europe/London"
     labels:
       - "New: Pull Request"
      - "Bot"
2 changes: 1 addition & 1 deletion .github/workflows/ci-manifest.yml
@@ -23,4 +23,4 @@ concurrency:
 jobs:
   manifest:
     name: "check-manifest"
-    uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.07.5
+    uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.09.9
13 changes: 6 additions & 7 deletions .github/workflows/ci-tests.yml
@@ -68,30 +68,29 @@ jobs:
       - name: "data cache"
         uses: ./.github/workflows/composite/iris-data-cache
         with:
-          cache_build: 0
+          cache_build: 6
           env_name: ${{ env.ENV_NAME }}
           version: ${{ env.IRIS_TEST_DATA_VERSION }}
 
       - name: "conda package cache"
         uses: ./.github/workflows/composite/conda-pkg-cache
         with:
-          cache_build: 0
+          cache_build: 6
           cache_period: ${{ env.CACHE_PERIOD }}
           env_name: ${{ env.ENV_NAME }}
 
       - name: "conda install"
         uses: conda-incubator/setup-miniconda@v3
         with:
           miniforge-version: latest
-          channels: conda-forge,defaults
+          channels: conda-forge
           activate-environment: ${{ env.ENV_NAME }}
           auto-update-conda: false
-          use-only-tar-bz2: true
 
       - name: "conda environment cache"
         uses: ./.github/workflows/composite/conda-env-cache
         with:
-          cache_build: 0
+          cache_build: 6
           cache_period: ${{ env.CACHE_PERIOD }}
           env_name: ${{ env.ENV_NAME }}
           install_packages: "cartopy nox pip"
@@ -104,14 +103,14 @@ jobs:
       - name: "cartopy cache"
         uses: ./.github/workflows/composite/cartopy-cache
         with:
-          cache_build: 0
+          cache_build: 6
           cache_period: ${{ env.CACHE_PERIOD }}
           env_name: ${{ env.ENV_NAME }}
 
       - name: "nox cache"
         uses: ./.github/workflows/composite/nox-cache
         with:
-          cache_build: 2
+          cache_build: 6
           env_name: ${{ env.ENV_NAME }}
           lock_file: ${{ env.LOCK_FILE }}
2 changes: 1 addition & 1 deletion .github/workflows/composite/cartopy-cache/action.yml
@@ -20,7 +20,7 @@ inputs:
 runs:
   using: "composite"
   steps:
-    - uses: actions/cache@v3
+    - uses: actions/cache@v4
       id: cartopy-cache
       with:
         path: ~/.local/share/cartopy
2 changes: 1 addition & 1 deletion .github/workflows/composite/conda-env-cache/action.yml
@@ -23,7 +23,7 @@ inputs:
 runs:
   using: "composite"
   steps:
-    - uses: actions/cache@v3
+    - uses: actions/cache@v4
       id: conda-env-cache
       with:
         path: ${{ env.CONDA }}/envs/${{ inputs.env_name }}
2 changes: 1 addition & 1 deletion .github/workflows/composite/conda-pkg-cache/action.yml
@@ -16,7 +16,7 @@ inputs:
 runs:
   using: "composite"
   steps:
-    - uses: actions/cache@v3
+    - uses: actions/cache@v4
       with:
         path: ~/conda_pkgs_dir
         key: ${{ runner.os }}-conda-pkgs-${{ inputs.env_name }}-p${{ inputs.cache_period }}-b${{ inputs.cache_build }}
2 changes: 1 addition & 1 deletion .github/workflows/composite/iris-data-cache/action.yml
@@ -16,7 +16,7 @@ inputs:
 runs:
   using: "composite"
   steps:
-    - uses: actions/cache@v3
+    - uses: actions/cache@v4
       id: data-cache
       with:
         path: ~/iris-test-data
2 changes: 1 addition & 1 deletion .github/workflows/composite/nox-cache/action.yml
@@ -16,7 +16,7 @@ inputs:
 runs:
   using: "composite"
   steps:
-    - uses: actions/cache@v3
+    - uses: actions/cache@v4
       with:
         path: ${{ github.workspace }}/.nox
         key: ${{ runner.os }}-nox-${{ inputs.env_name }}-s${{ matrix.session }}-py${{ matrix.python-version }}-b${{ inputs.cache_build }}-${{ hashFiles(inputs.lock_file) }}
2 changes: 1 addition & 1 deletion .github/workflows/refresh-lockfiles.yml
@@ -14,5 +14,5 @@ on:
 
 jobs:
   refresh_lockfiles:
-    uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.07.5
+    uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.09.9
     secrets: inherit
12 changes: 6 additions & 6 deletions .pre-commit-config.yaml
@@ -13,7 +13,7 @@ minimum_pre_commit_version: 1.21.0
 
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v5.0.0
     hooks:
       # Prevent giant files from being committed.
       - id: check-added-large-files
@@ -29,7 +29,7 @@ repos:
       - id: no-commit-to-branch
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: "v0.4.10"
+    rev: "v0.6.9"
     hooks:
       - id: ruff
         types: [file, python]
@@ -45,13 +45,13 @@ repos:
         additional_dependencies: [tomli]
 
   - repo: https://github.com/PyCQA/flake8
-    rev: 7.1.0
+    rev: 7.1.1
    hooks:
       - id: flake8
         types: [file, python]
 
   - repo: https://github.com/asottile/blacken-docs
-    rev: 1.16.0
+    rev: 1.18.0
     hooks:
       - id: blacken-docs
         types: [file, rst]
@@ -63,15 +63,15 @@ repos:
         types: [file, python]
 
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: 'v1.9.0'
+    rev: 'v1.11.2'
     hooks:
       - id: mypy
         additional_dependencies:
           - 'types-requests'
         exclude: 'noxfile\.py|docs/src/conf\.py'
 
   - repo: https://github.com/numpy/numpydoc
-    rev: v1.7.0
+    rev: v1.8.0
     hooks:
       - id: numpydoc-validation
         exclude: "^lib/iris/tests/|docs/gallery_code/"
6 changes: 6 additions & 0 deletions benchmarks/README.md
@@ -118,6 +118,12 @@ repeats _between_ `setup()` calls using the `repeat` attribute.
 `warmup_time = 0` is also advisable since ASV performs independent re-runs to
 estimate run-time, and these will still be subject to the original problem.
 
+### Custom benchmarks
+
+Iris benchmarking implements custom benchmark types, such as a `tracemalloc`
+benchmark to measure memory growth. See [custom_bms/](./custom_bms) for more
+detail.
+
 ### Scaling / non-Scaling Performance Differences
 
 **(We no longer advocate the below for benchmarks run during CI, given the
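The README addition above does not show what one of these custom benchmarks looks like in code. As a hedged sketch only: judging from the rename to `tracemalloc_save_data_netcdf` later in this commit, such benchmarks appear to be declared with a `tracemalloc_` method prefix, analogous to ASV's built-in `time_` prefix. The suite and method names below are hypothetical and do not appear in this commit:

```python
# Hypothetical sketch of a tracemalloc-style custom benchmark. The custom
# benchmark type (installed from custom_bms/) is assumed to run the method
# and report how much memory was allocated while it executed.
class MemorySuite:
    def setup(self):
        # Allocate inputs up front so only the measured call grows memory.
        self.n = 1_000_000

    def tracemalloc_build_payload(self):
        # Memory growth during this call is the benchmark result.
        self.payload = [0.0] * self.n
```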
7 changes: 5 additions & 2 deletions benchmarks/asv.conf.json
@@ -53,9 +53,12 @@
     "command_comment": [
         "We know that the Nox command takes care of installation in each",
         "environment, and in the case of Iris no specialised uninstall or",
-        "build commands are needed to get it working."
+        "build commands are needed to get it working.",
+
+        "We do however need to install the custom benchmarks for them to be",
+        "usable."
     ],
     "install_command": [],
     "uninstall_command": [],
-    "build_command": []
+    "build_command": ["python {conf_dir}/custom_bms/install.py"]
 }
105 changes: 0 additions & 105 deletions benchmarks/benchmarks/__init__.py
@@ -37,111 +37,6 @@ def disable_repeat_between_setup(benchmark_object):
     return benchmark_object
 
 
-class TrackAddedMemoryAllocation:
-    """Measures by how much process resident memory grew, during execution.
-
-    Context manager which measures by how much process resident memory grew,
-    during execution of its enclosed code block.
-
-    Obviously limited as to what it actually measures : Relies on the current
-    process not having significant unused (de-allocated) memory when the
-    tested codeblock runs, and only reliable when the code allocates a
-    significant amount of new memory.
-
-    Example:
-        with TrackAddedMemoryAllocation() as mb:
-            initial_call()
-            other_call()
-        result = mb.addedmem_mb()
-
-    Attributes
-    ----------
-    RESULT_MINIMUM_MB : float
-        The smallest result that should ever be returned, in Mb. Results
-        fluctuate from run to run (usually within 1Mb) so if a result is
-        sufficiently small this noise will produce a before-after ratio over
-        AVD's detection threshold and be treated as 'signal'. Results
-        smaller than this value will therefore be returned as equal to this
-        value, ensuring fractionally small noise / no noise at all.
-        Defaults to 1.0
-    RESULT_ROUND_DP : int
-        Number of decimal places of rounding on result values (in Mb).
-        Defaults to 1
-    """
-
-    RESULT_MINIMUM_MB = 0.2
-    RESULT_ROUND_DP = 1  # I.E. to nearest 0.1 Mb
-
-    def __enter__(self):
-        tracemalloc.start()
-        return self
-
-    def __exit__(self, *_):
-        _, peak_mem_bytes = tracemalloc.get_traced_memory()
-        tracemalloc.stop()
-        # Save peak-memory allocation, scaled from bytes to Mb.
-        self._peak_mb = peak_mem_bytes * (2.0**-20)
-
-    def addedmem_mb(self):
-        """Return measured memory growth, in Mb."""
-        result = self._peak_mb
-        # Small results are too vulnerable to noise being interpreted as signal.
-        result = max(self.RESULT_MINIMUM_MB, result)
-        # Rounding makes results easier to read.
-        result = np.round(result, self.RESULT_ROUND_DP)
-        return result
-
-    @staticmethod
-    def decorator(decorated_func):
-        """Benchmark to track growth in resident memory during execution.
-
-        Intended for use on ASV ``track_`` benchmarks. Applies the
-        :class:`TrackAddedMemoryAllocation` context manager to the benchmark
-        code, sets the benchmark ``unit`` attribute to ``Mb``.
-        """
-
-        def _wrapper(*args, **kwargs):
-            assert decorated_func.__name__[:6] == "track_"
-            # Run the decorated benchmark within the added memory context
-            # manager.
-            with TrackAddedMemoryAllocation() as mb:
-                decorated_func(*args, **kwargs)
-            return mb.addedmem_mb()
-
-        decorated_func.unit = "Mb"
-        return _wrapper
-
-    @staticmethod
-    def decorator_repeating(repeats=3):
-        """Benchmark to track growth in resident memory during execution.
-
-        Tracks memory for repeated calls of decorated function.
-
-        Intended for use on ASV ``track_`` benchmarks. Applies the
-        :class:`TrackAddedMemoryAllocation` context manager to the benchmark
-        code, sets the benchmark ``unit`` attribute to ``Mb``.
-        """
-
-        def decorator(decorated_func):
-            def _wrapper(*args, **kwargs):
-                assert decorated_func.__name__[:6] == "track_"
-                # Run the decorated benchmark within the added memory context
-                # manager.
-                with TrackAddedMemoryAllocation() as mb:
-                    for _ in range(repeats):
-                        decorated_func(*args, **kwargs)
-                return mb.addedmem_mb()
-
-            decorated_func.unit = "Mb"
-            return _wrapper
-
-        return decorator
-
-
 def on_demand_benchmark(benchmark_object):
     """Disable these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set.
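For context, the removed `TrackAddedMemoryAllocation` class was a thin wrapper around Python's standard `tracemalloc` module. A minimal sketch of the underlying measurement pattern it implemented (standard-library calls only; this is not the replacement machinery in `custom_bms/`):

```python
import tracemalloc

# Record the peak traced allocation across a block of code, as the removed
# class did, then report it in Mb (bytes / 2**20).
tracemalloc.start()
payload = [float(i) for i in range(1_000_000)]  # code under measurement
_, peak_bytes = tracemalloc.get_traced_memory()  # returns (current, peak)
tracemalloc.stop()
print(f"peak allocation: {peak_bytes / 2**20:.1f} Mb")
```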
5 changes: 2 additions & 3 deletions benchmarks/benchmarks/cperf/save.py
@@ -6,7 +6,7 @@
 
 from iris import save
 
-from .. import TrackAddedMemoryAllocation, on_demand_benchmark
+from .. import on_demand_benchmark
 from ..generate_data.ugrid import make_cube_like_2d_cubesphere, make_cube_like_umfield
 from . import _N_CUBESPHERE_UM_EQUIVALENT, _UM_DIMS_YX
 
@@ -36,6 +36,5 @@ def _save_data(self, cube):
     def time_save_data_netcdf(self, data_type):
         self._save_data(self.cube)
 
-    @TrackAddedMemoryAllocation.decorator
-    def track_addedmem_save_data_netcdf(self, data_type):
+    def tracemalloc_save_data_netcdf(self, data_type):
         self._save_data(self.cube)
6 changes: 4 additions & 2 deletions benchmarks/benchmarks/cube.py
@@ -4,6 +4,8 @@
 # See LICENSE in the root of the repository for full licensing details.
 """Cube benchmark tests."""
 
+from collections.abc import Iterable
+
 from iris import coords
 from iris.cube import Cube
 
@@ -21,9 +23,9 @@ def setup(self, w_mesh: bool, _) -> None:
         source_cube = realistic_4d_w_everything(w_mesh=w_mesh)
 
         def get_coords_and_dims(
-            coords_tuple: tuple[coords._DimensionalMetadata, ...],
+            coords_iter: Iterable[coords._DimensionalMetadata],
         ) -> list[tuple[coords._DimensionalMetadata, tuple[int, ...]]]:
-            return [(c, c.cube_dims(source_cube)) for c in coords_tuple]
+            return [(c, c.cube_dims(source_cube)) for c in coords_iter]
 
         self.cube_kwargs = dict(
             data=source_cube.data,
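The cube.py hunk above widens the helper's parameter hint from a concrete `tuple` to `Iterable`. A standalone illustration (not code from this commit) of what the wider hint permits:

```python
from collections.abc import Iterable


def lengths(items: Iterable[str]) -> list[int]:
    # Accepts any iterable of strings, not just a tuple.
    return [len(item) for item in items]


print(lengths(("a", "bb")))               # tuple: accepted under both hints
print(lengths(w for w in ["ccc", "d"]))   # generator: only valid under Iterable
```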
… (remaining changed files not loaded)
