Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove optimistix fixed point finders from API. #42

Merged
merged 1 commit into from
May 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 0 additions & 30 deletions bayeux/_src/optimize/optimistix.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,41 +70,11 @@ class BFGS(_OptimistixOptimizer):
optimizer = "BFGS"


class Chord(_OptimistixOptimizer):
  """Chord solver entry, built on the shared _OptimistixOptimizer machinery."""
  # Public bayeux identifier for this optimizer.
  name = "optimistix_chord"
  # NOTE(review): presumably the optimistix solver class name resolved by
  # the base class -- confirm against _OptimistixOptimizer.
  optimizer = "Chord"


class Dogleg(_OptimistixOptimizer):
  """Dogleg solver entry, built on the shared _OptimistixOptimizer machinery."""
  # Public bayeux identifier for this optimizer.
  name = "optimistix_dogleg"
  # NOTE(review): presumably the optimistix solver class name resolved by
  # the base class -- confirm against _OptimistixOptimizer.
  optimizer = "Dogleg"


class GaussNewton(_OptimistixOptimizer):
  """Gauss-Newton solver entry, built on the shared _OptimistixOptimizer machinery."""
  # Public bayeux identifier for this optimizer.
  name = "optimistix_gauss_newton"
  # NOTE(review): presumably the optimistix solver class name resolved by
  # the base class -- confirm against _OptimistixOptimizer.
  optimizer = "GaussNewton"


class IndirectLevenbergMarquardt(_OptimistixOptimizer):
  """Indirect Levenberg-Marquardt entry, built on _OptimistixOptimizer."""
  # Public bayeux identifier for this optimizer.
  name = "optimistix_indirect_levenberg_marquardt"
  # NOTE(review): presumably the optimistix solver class name resolved by
  # the base class -- confirm against _OptimistixOptimizer.
  optimizer = "IndirectLevenbergMarquardt"


class LevenbergMarquardt(_OptimistixOptimizer):
  """Levenberg-Marquardt solver entry, built on _OptimistixOptimizer."""
  # Public bayeux identifier for this optimizer.
  name = "optimistix_levenberg_marquardt"
  # NOTE(review): presumably the optimistix solver class name resolved by
  # the base class -- confirm against _OptimistixOptimizer.
  optimizer = "LevenbergMarquardt"


class NelderMead(_OptimistixOptimizer):
  """Nelder-Mead solver entry, built on the shared _OptimistixOptimizer machinery."""
  # Public bayeux identifier for this optimizer.
  name = "optimistix_nelder_mead"
  # NOTE(review): presumably the optimistix solver class name resolved by
  # the base class -- confirm against _OptimistixOptimizer.
  optimizer = "NelderMead"


class Newton(_OptimistixOptimizer):
  """Newton solver entry, built on the shared _OptimistixOptimizer machinery."""
  # Public bayeux identifier for this optimizer.
  name = "optimistix_newton"
  # NOTE(review): presumably the optimistix solver class name resolved by
  # the base class -- confirm against _OptimistixOptimizer.
  optimizer = "Newton"


class NonlinearCG(_OptimistixOptimizer):
  """Nonlinear conjugate-gradient entry, built on _OptimistixOptimizer."""
  # Public bayeux identifier for this optimizer.
  name = "optimistix_nonlinear_cg"
  # NOTE(review): presumably the optimistix solver class name resolved by
  # the base class -- confirm against _OptimistixOptimizer.
  optimizer = "NonlinearCG"
2 changes: 2 additions & 0 deletions bayeux/_src/optimize/shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,8 @@ def get_optimizer_kwargs(optimizer, kwargs, ignore_required=None):
f"{','.join(optimizer_required)}. Probably file a bug, but "
"you can try to manually supply them as keywords."
)
optimizer_kwargs.update(
{k: kwargs[k] for k in optimizer_kwargs if k in kwargs})
return optimizer_kwargs


Expand Down
32 changes: 10 additions & 22 deletions bayeux/optimize/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,28 +26,6 @@
from bayeux._src.optimize.jaxopt import NonlinearCG
__all__.extend(["BFGS", "GradientDescent", "LBFGS", "NonlinearCG"])

# Optional optimistix backend: import (and publish in __all__) the
# optimistix-based optimizers only when the `optimistix` package is
# installed, so bayeux works without this optional dependency.
if importlib.util.find_spec("optimistix") is not None:
  # BFGS and NonlinearCG are aliased because jaxopt classes with the same
  # names are already imported into this module above.
  from bayeux._src.optimize.optimistix import BFGS as optimistix_BFGS
  from bayeux._src.optimize.optimistix import Chord
  from bayeux._src.optimize.optimistix import Dogleg
  from bayeux._src.optimize.optimistix import GaussNewton
  from bayeux._src.optimize.optimistix import IndirectLevenbergMarquardt
  from bayeux._src.optimize.optimistix import LevenbergMarquardt
  from bayeux._src.optimize.optimistix import NelderMead
  from bayeux._src.optimize.optimistix import Newton
  from bayeux._src.optimize.optimistix import NonlinearCG as optimistix_NonlinearCG

  # Register the optional names in the module's public API.
  __all__.extend([
      "optimistix_BFGS",
      "Chord",
      "Dogleg",
      "GaussNewton",
      "IndirectLevenbergMarquardt",
      "LevenbergMarquardt",
      "NelderMead",
      "Newton",
      "optimistix_NonlinearCG"])

if importlib.util.find_spec("optax") is not None:
from bayeux._src.optimize.optax import AdaBelief
from bayeux._src.optimize.optax import Adafactor
Expand Down Expand Up @@ -92,3 +70,13 @@
"Sm3",
"Yogi",
])

# Optional optimistix backend: import (and publish in __all__) the
# optimistix-based optimizers only when the `optimistix` package is
# installed, so bayeux works without this optional dependency.
if importlib.util.find_spec("optimistix") is not None:
  # BFGS and NonlinearCG are aliased because jaxopt classes with the same
  # names are already imported into this module above.
  from bayeux._src.optimize.optimistix import BFGS as optimistix_BFGS
  from bayeux._src.optimize.optimistix import NelderMead
  from bayeux._src.optimize.optimistix import NonlinearCG as optimistix_NonlinearCG

  # Register the optional names in the module's public API.
  __all__.extend([
      "optimistix_BFGS",
      "NelderMead",
      "optimistix_NonlinearCG"])
Loading