From 68e1b375cc17c94ae43e70a62de660eb91d0bb6c Mon Sep 17 00:00:00 2001 From: Marc Rasi Date: Tue, 12 May 2020 12:43:22 -0700 Subject: [PATCH 1/2] genericize CGLS --- Examples/Pose2SLAMG2O/main.swift | 2 +- .../Core/EuclideanVectorSpace.swift | 30 ++++++- Sources/SwiftFusion/Geometry/Pose3.swift | 4 + Sources/SwiftFusion/Inference/Errors.swift | 55 ++++++------- .../Inference/GaussianFactorGraph.swift | 27 +++++-- .../LinearLeastSquaresObjective.swift | 56 +++++++++++++ .../SwiftFusion/Inference/VectorValues.swift | 78 ++++++++++++++----- Sources/SwiftFusion/Optimizers/CGLS.swift | 33 ++++---- .../Geometry/Pose3Tests.swift | 2 +- .../Inference/NonlinearFactorGraphTests.swift | 4 +- .../Optimizers/CGLSTests.swift | 2 +- 11 files changed, 214 insertions(+), 79 deletions(-) create mode 100644 Sources/SwiftFusion/Inference/LinearLeastSquaresObjective.swift diff --git a/Examples/Pose2SLAMG2O/main.swift b/Examples/Pose2SLAMG2O/main.swift index f4d6d57d..77f3a16e 100644 --- a/Examples/Pose2SLAMG2O/main.swift +++ b/Examples/Pose2SLAMG2O/main.swift @@ -71,7 +71,7 @@ func main() { for i in 0.. 
Self { + // return lhs.scaled(by: lhs) + // } + // + // public static func * (_ lhs: Self, _ rhs: Double) -> Self { + // return lhs.scaled(by: rhs) + // } + // + // public static func / (_ lhs: Self, _ rhs: Double) -> Self { + // return lhs.scaled(by: 1 / rhs) + // } } diff --git a/Sources/SwiftFusion/Geometry/Pose3.swift b/Sources/SwiftFusion/Geometry/Pose3.swift index cbc0a941..78b560ae 100644 --- a/Sources/SwiftFusion/Geometry/Pose3.swift +++ b/Sources/SwiftFusion/Geometry/Pose3.swift @@ -4,6 +4,10 @@ import TensorFlow public struct Vector6: EuclideanVectorSpace, VectorProtocol, KeyPathIterable, TangentStandardBasis { var w: Vector3 var v: Vector3 + + public var squaredNorm: Double { + return w.squaredNorm + v.squaredNorm + } } extension Vector6 { diff --git a/Sources/SwiftFusion/Inference/Errors.swift b/Sources/SwiftFusion/Inference/Errors.swift index 833d806b..22dcf21f 100644 --- a/Sources/SwiftFusion/Inference/Errors.swift +++ b/Sources/SwiftFusion/Inference/Errors.swift @@ -18,43 +18,36 @@ import TensorFlow public typealias Error = Vector /// Collection of all errors returned by a Factor Graph -public typealias Errors = Array +public struct Errors { + public var values: Array.DifferentiableView + + /// Creates empty `Errors`. + public init() { + self.values = Array.DifferentiableView() + } + + /// Creates `Errors` containing the given `errors`. 
+ public init(_ errors: [Error]) { + self.values = Array.DifferentiableView(errors) + } + + public static func += (_ lhs: inout Self, _ rhs: [Error]) { + lhs.values.base += rhs + } +} /// Extending Array for Error type /// This simplifies the implementation for `Errors`, albeit in a less self-contained manner /// TODO: change this to a concrete `struct Errors` and implement all the protocols -extension Array where Element == Error { - public static func - (_ a: Self, _ b: Self) -> Self { - var result = a - let _ = result.indices.map { result[$0] = a[$0] + b[$0] } - return result - } - +extension Errors: EuclideanVectorSpace { + + // Note: Requirements of `Differentiable`, `AdditiveArithmetic`, and `VectorProtocol` are automatically + // synthesized. Yay! + /// Calculates the L2 norm - public var norm: Double { + public var squaredNorm: Double { get { - self.map { $0.squared().sum() }.reduce(0.0, { $0 + $1 }) + values.base.map { $0.squared().sum() }.reduce(0.0, { $0 + $1 }) } } - - /// Errors + scalar - static func + (_ lhs: Self, _ rhs: Double) -> Self { - var result = lhs - let _ = result.indices.map { result[$0] += rhs } - return result - } - - /// Errors + Errors - static func + (_ lhs: Self, _ rhs: Self) -> Self { - var result = lhs - let _ = result.indices.map { result[$0] += rhs[$0] } - return result - } - - /// scalar * Errors - static func * (_ lhs: Double, _ rhs: Self) -> Self { - var result = rhs - let _ = result.indices.map { result[$0] *= lhs } - return result - } } diff --git a/Sources/SwiftFusion/Inference/GaussianFactorGraph.swift b/Sources/SwiftFusion/Inference/GaussianFactorGraph.swift index f9121c1a..04d72a07 100644 --- a/Sources/SwiftFusion/Inference/GaussianFactorGraph.swift +++ b/Sources/SwiftFusion/Inference/GaussianFactorGraph.swift @@ -26,7 +26,7 @@ public struct GaussianFactorGraph { public var b: Errors { get { - factors.map { $0.b } + Errors(factors.map { $0.b }) } } @@ -35,12 +35,12 @@ public struct GaussianFactorGraph { /// This 
calculates `A*x`, where x is the collection of key-values public static func * (lhs: GaussianFactorGraph, rhs: VectorValues) -> Errors { - Array(lhs.factors.map { $0 * rhs }) + return Errors(lhs.factors.map { $0 * rhs }) } /// This calculates `A*x - b`, where x is the collection of key-values public func residual (_ val: VectorValues) -> Errors { - Array(self.factors.map { $0 * val - $0.b }) + return Errors(self.factors.map { $0 * val - $0.b }) } /// Convenience operator for adding factor @@ -51,8 +51,8 @@ public struct GaussianFactorGraph { /// This calculates `A^T * r`, where r is the residual (error) public func atr(_ r: Errors) -> VectorValues { var vv = VectorValues() - for i in r.indices { - let JTr = factors[i].atr(r[i]) + for i in r.values.indices { + let JTr = factors[i].atr(r.values[i]) vv = vv + JTr } @@ -60,3 +60,20 @@ public struct GaussianFactorGraph { return vv } } + +extension GaussianFactorGraph: MatrixLinearLeastSquaresObjective { + public typealias Variables = VectorValues + public typealias Residuals = Errors + + public var bias: Residuals { + return b + } + + public func productA(times x: Variables) -> Residuals { + return self * x + } + + public func productATranspose(times r: Residuals) -> Variables { + return self.atr(r) + } +} diff --git a/Sources/SwiftFusion/Inference/LinearLeastSquaresObjective.swift b/Sources/SwiftFusion/Inference/LinearLeastSquaresObjective.swift new file mode 100644 index 00000000..34691268 --- /dev/null +++ b/Sources/SwiftFusion/Inference/LinearLeastSquaresObjective.swift @@ -0,0 +1,56 @@ +// Copyright 2020 The SwiftFusion Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/// The objective function of a linear least squares optimization problem. +/// +/// The problem is to find the value `x: Variables` that minimizes `energy(at: x)`, where +/// `energy(at: x)` is defined to be the Euclidean norm of `residuals(at: x)`. +public protocol LinearLeastSquaresObjective { + /// The type of the solution. + associatedtype Variables: EuclideanVectorSpace + + /// The type of the residual. + associatedtype Residuals: EuclideanVectorSpace + + /// An affine function of `x`. + func residuals(at x: Variables) -> Residuals +} + +extension LinearLeastSquaresObjective { + /// The objective function that we are trying to minimize. + func energy(at x: Variables) -> Residuals.VectorSpaceScalar { + return residuals(at: x).squaredNorm + } +} + +/// The objective function of a linear least squares optimization problem whose residuals are +/// given by `bias - A * x`, where `A` is a matrix and `bias` is a vector. +public protocol MatrixLinearLeastSquaresObjective: LinearLeastSquaresObjective { + /// The bias term. + var bias: Residuals { get } + + /// Returns the product `A * x`. + func productA(times x: Variables) -> Residuals + + /// Returns the product `A^t * r`. + func productATranspose(times r: Residuals) -> Variables +} + +extension MatrixLinearLeastSquaresObjective { + /// The residuals are determined by the matrix and bias, so a conforming type does not have to + /// define `residuals` itself. 
+ public func residuals(at x: Variables) -> Residuals { + return bias - productA(times: x) + } +} diff --git a/Sources/SwiftFusion/Inference/VectorValues.swift b/Sources/SwiftFusion/Inference/VectorValues.swift index ca06832c..edc2c07d 100644 --- a/Sources/SwiftFusion/Inference/VectorValues.swift +++ b/Sources/SwiftFusion/Inference/VectorValues.swift @@ -48,11 +48,6 @@ public struct VectorValues: KeyPathIterable { } } - /// L2 norm of the VectorValues - var norm: Double { - self._values.map { $0.squared().sum() }.reduce(0.0, { $0 + $1 }) - } - /// Insert a key value pair public mutating func insert(_ key: Int, _ val: Vector) { assert(_indices[key] == nil) @@ -61,26 +56,20 @@ public struct VectorValues: KeyPathIterable { self._values.append(val) } - /// VectorValues + Scalar - static func + (_ lhs: Self, _ rhs: Self.ScalarType) -> Self { - var result = lhs - let _ = result._values.indices.map { result._values[$0] += rhs } - return result - } - - /// Scalar * VectorValues - static func * (_ lhs: Self.ScalarType, _ rhs: Self) -> Self { - var result = rhs - let _ = result._values.indices.map { result._values[$0] *= lhs } - return result - } } -extension VectorValues: Differentiable { +extension VectorValues: EuclideanVectorSpace { + + // NOTE: Most of these are boilerplate that should be synthesized automatically. However, the + // current synthesis functionality can't deal with the `_indices` property. So we have to + // implement it manually for now. + + // MARK: - Differentiable conformance. + public typealias TangentVector = Self -} -extension VectorValues: AdditiveArithmetic { + // MARK: - AdditiveArithmetic conformance. 
+ public static func += (_ lhs: inout VectorValues, _ rhs: VectorValues) { for key in rhs.keys { let rhsVector = rhs[key] @@ -110,6 +99,53 @@ extension VectorValues: AdditiveArithmetic { public static var zero: VectorValues { return VectorValues() } + + // MARK: - VectorProtocol conformance + + public typealias VectorValuesSpaceScalar = Double + + public mutating func add(_ x: Double) { + for index in _values.indices { + _values[index] += x + } + } + + public func adding(_ x: Double) -> VectorValues { + var result = self + result.add(x) + return result + } + + public mutating func subtract(_ x: Double) { + for index in _values.indices { + _values[index] -= x + } + } + + public func subtracting(_ x: Double) -> VectorValues { + var result = self + result.subtract(x) + return result + } + + public mutating func scale(by scalar: Double) { + for index in _values.indices { + _values[index] *= scalar + } + } + + public func scaled(by scalar: Double) -> VectorValues { + var result = self + result.scale(by: scalar) + return result + } + + // MARK: - Additional EuclideanVectorSpace requirements. 
+ + public var squaredNorm: Double { + self._values.map { $0.squared().sum() }.reduce(0.0, { $0 + $1 }) + } + } extension VectorValues: CustomStringConvertible { diff --git a/Sources/SwiftFusion/Optimizers/CGLS.swift b/Sources/SwiftFusion/Optimizers/CGLS.swift index ebadffbe..1201dfd8 100644 --- a/Sources/SwiftFusion/Optimizers/CGLS.swift +++ b/Sources/SwiftFusion/Optimizers/CGLS.swift @@ -32,30 +32,31 @@ public class CGLS { /// Optimize the Gaussian Factor Graph with a initial estimate /// Reference: Bjorck96book_numerical-methods-for-least-squares-problems /// Page 289, Algorithm 7.4.1 - public func optimize(gfg: GaussianFactorGraph, initial: inout VectorValues) { + public func optimize( + objective: Objective, + initial: inout Objective.Variables + ) { step += 1 - - let b = gfg.b - - var x: VectorValues = initial // x(0), the initial value - var r: Errors = b - gfg * x // r(0) = b - A * x(0), the residual - var p = gfg.atr(r) // p(0) = s(0) = A^T * r(0), residual in value space + + var x: Objective.Variables = initial // x(0), the initial value + var r: Objective.Residuals = objective.residuals(at: x) // r(0) = b - A * x(0), the residual + var p = objective.productATranspose(times: r) // p(0) = s(0) = A^T * r(0), residual in value space var s = p // residual of normal equations - var gamma = s.norm // γ(0) = ||s(0)||^2 + var gamma = s.squaredNorm // γ(0) = ||s(0)||^2 while step < max_iteration { - let q = gfg * p // q(k) = A * p(k) - let alpha: Double = gamma / q.norm // α(k) = γ(k)/||q(k)||^2 - x = x + (alpha * p) // x(k+1) = x(k) + α(k) * p(k) - r = r + (-alpha) * q // r(k+1) = r(k) - α(k) * q(k) - s = gfg.atr(r) // s(k+1) = A.T * r(k+1) + let q = objective.productA(times: p) // q(k) = A * p(k) + let alpha: Double = gamma / q.squaredNorm // α(k) = γ(k)/||q(k)||^2 + x = x + p.scaled(by: alpha) // x(k+1) = x(k) + α(k) * p(k) + r = r + q.scaled(by: -alpha) // r(k+1) = r(k) - α(k) * q(k) + s = objective.productATranspose(times: r) // s(k+1) = A.T * r(k+1) - let 
gamma_next = s.norm // γ(k+1) = ||s(k+1)||^2 + let gamma_next = s.squaredNorm // γ(k+1) = ||s(k+1)||^2 let beta: Double = gamma_next/gamma // β(k) = γ(k+1)/γ(k) gamma = gamma_next - p = s + beta * p // p(k+1) = s(k+1) + β(k) * p(k) + p = s + p.scaled(by: beta) // p(k+1) = s(k+1) + β(k) * p(k) - if (alpha * p).norm < precision { + if alpha * alpha * p.squaredNorm < precision { break } step += 1 diff --git a/Tests/SwiftFusionTests/Geometry/Pose3Tests.swift b/Tests/SwiftFusionTests/Geometry/Pose3Tests.swift index 532012ac..0c4495e2 100644 --- a/Tests/SwiftFusionTests/Geometry/Pose3Tests.swift +++ b/Tests/SwiftFusionTests/Geometry/Pose3Tests.swift @@ -120,7 +120,7 @@ final class Pose3Tests: XCTestCase { dx.insert(i, Vector(zeros: 6)) } - optimizer.optimize(gfg: gfg, initial: &dx) + optimizer.optimize(objective: gfg, initial: &dx) val.move(along: dx) } diff --git a/Tests/SwiftFusionTests/Inference/NonlinearFactorGraphTests.swift b/Tests/SwiftFusionTests/Inference/NonlinearFactorGraphTests.swift index 704f9028..ea9a8d7f 100644 --- a/Tests/SwiftFusionTests/Inference/NonlinearFactorGraphTests.swift +++ b/Tests/SwiftFusionTests/Inference/NonlinearFactorGraphTests.swift @@ -26,7 +26,7 @@ final class NonlinearFactorGraphTests: XCTestCase { print("gfg = \(gfg)") print("error = \(gfg.residual(vv).norm)") - assertEqual((gfg.residual(vv))[0].tensor, expected, accuracy: 1e-6) + assertEqual(gfg.residual(vv).values[0].tensor, expected, accuracy: 1e-6) } /// test CGLS iterative solver @@ -65,7 +65,7 @@ final class NonlinearFactorGraphTests: XCTestCase { dx.insert(i, Vector(zeros: 3)) } - optimizer.optimize(gfg: gfg, initial: &dx) + optimizer.optimize(objective: gfg, initial: &dx) val.move(along: dx) } diff --git a/Tests/SwiftFusionTests/Optimizers/CGLSTests.swift b/Tests/SwiftFusionTests/Optimizers/CGLSTests.swift index 1e808598..15b112fe 100644 --- a/Tests/SwiftFusionTests/Optimizers/CGLSTests.swift +++ b/Tests/SwiftFusionTests/Optimizers/CGLSTests.swift @@ -11,7 +11,7 @@ final 
class CGLSTests: XCTestCase { let optimizer = CGLS(precision: 1e-7, max_iteration: 10) var x: VectorValues = SimpleGaussianFactorGraph.zeroDelta() - optimizer.optimize(gfg: gfg, initial: &x) + optimizer.optimize(objective: gfg, initial: &x) let expected = SimpleGaussianFactorGraph.correctDelta() From 27893a772f3408624e43f93afe7ff471e93b2a3f Mon Sep 17 00:00:00 2001 From: Marc Rasi Date: Tue, 12 May 2020 20:36:09 -0700 Subject: [PATCH 2/2] rename LinearLeastSquaresObjective -> DecomposedAffineFunction --- Examples/Pose2SLAMG2O/main.swift | 2 +- .../Inference/DecomposedAffineFunction.swift | 44 +++++++++++++++ .../Inference/GaussianFactorGraph.swift | 20 ++++--- .../LinearLeastSquaresObjective.swift | 56 ------------------- Sources/SwiftFusion/Optimizers/CGLS.swift | 18 +++--- .../Geometry/Pose3Tests.swift | 2 +- .../Inference/NonlinearFactorGraphTests.swift | 2 +- .../Optimizers/CGLSTests.swift | 2 +- 8 files changed, 68 insertions(+), 78 deletions(-) create mode 100644 Sources/SwiftFusion/Inference/DecomposedAffineFunction.swift delete mode 100644 Sources/SwiftFusion/Inference/LinearLeastSquaresObjective.swift diff --git a/Examples/Pose2SLAMG2O/main.swift b/Examples/Pose2SLAMG2O/main.swift index 77f3a16e..740ab8a1 100644 --- a/Examples/Pose2SLAMG2O/main.swift +++ b/Examples/Pose2SLAMG2O/main.swift @@ -71,7 +71,7 @@ func main() { for i in 0.. Output + + /// The linear component of the affine function. + func applyLinearForward(_ x: Input) -> Output + + /// The linear adjoint of the linear component of the affine function. + func applyLinearAdjoint(_ y: Output) -> Input + + /// The bias component of the affine function. + /// + /// This is equal to `applyLinearForward(Input.zero)`. 
+ var bias: Output { get } +} + +extension DecomposedAffineFunction { + public func callAsFunction(_ x: Input) -> Output { + return applyLinearForward(x) + bias + } +} diff --git a/Sources/SwiftFusion/Inference/GaussianFactorGraph.swift b/Sources/SwiftFusion/Inference/GaussianFactorGraph.swift index 04d72a07..5ede5d0c 100644 --- a/Sources/SwiftFusion/Inference/GaussianFactorGraph.swift +++ b/Sources/SwiftFusion/Inference/GaussianFactorGraph.swift @@ -61,19 +61,23 @@ public struct GaussianFactorGraph { } } -extension GaussianFactorGraph: MatrixLinearLeastSquaresObjective { - public typealias Variables = VectorValues - public typealias Residuals = Errors +extension GaussianFactorGraph: DecomposedAffineFunction { + public typealias Input = VectorValues + public typealias Output = Errors - public var bias: Residuals { - return b + public func callAsFunction(_ x: Input) -> Output { + return residual(x) } - public func productA(times x: Variables) -> Residuals { + public func applyLinearForward(_ x: Input) -> Output { return self * x } - public func productATranspose(times r: Residuals) -> Variables { - return self.atr(r) + public func applyLinearAdjoint(_ y: Output) -> Input { + return self.atr(y) + } + + public var bias: Output { + return b.scaled(by: -1) } } diff --git a/Sources/SwiftFusion/Inference/LinearLeastSquaresObjective.swift b/Sources/SwiftFusion/Inference/LinearLeastSquaresObjective.swift deleted file mode 100644 index 34691268..00000000 --- a/Sources/SwiftFusion/Inference/LinearLeastSquaresObjective.swift +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2020 The SwiftFusion Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/// The objective function of a linear least squares optimization problem. -/// -/// The problem is to find the value `x: Variables` that minimizes `energy(at: x)`, where -/// `energy(at: x)` is defined to be the Euclidean norm of `residuals(at: x)`. -public protocol LinearLeastSquaresObjective { - /// The type of the solution. - associatedtype Variables: EuclideanVectorSpace - - /// The type of the residual. - associatedtype Residuals: EuclideanVectorSpace - - /// An affine function of `x`. - func residuals(at x: Variables) -> Residuals -} - -extension LinearLeastSquaresObjective { - /// The objective function that we are trying to minimize. - func energy(at x: Variables) -> Residuals.VectorSpaceScalar { - return residuals(at: x).squaredNorm - } -} - -/// The objective function of a linear least squares optimization problem whose residuals are -/// given by `bias - A * x`, where `A` is a matrix and `bias` is a vector. -public protocol MatrixLinearLeastSquaresObjective: LinearLeastSquaresObjective { - /// The bias term. - var bias: Residuals { get } - - /// Returns the product `A * x`. - func productA(times x: Variables) -> Residuals - - /// Returns the product `A^t * r`. - func productATranspose(times r: Residuals) -> Variables -} - -extension MatrixLinearLeastSquaresObjective { - /// The residuals are determined by the matrix and bias, so a conforming type does not have to - /// define `residuals` itself. 
-  public func residuals(at x: Variables) -> Residuals {
-    return bias - productA(times: x)
-  }
-}
diff --git a/Sources/SwiftFusion/Optimizers/CGLS.swift b/Sources/SwiftFusion/Optimizers/CGLS.swift
index 1201dfd8..e87401ab 100644
--- a/Sources/SwiftFusion/Optimizers/CGLS.swift
+++ b/Sources/SwiftFusion/Optimizers/CGLS.swift
@@ -29,27 +29,25 @@ public class CGLS {
     max_iteration = maxiter
   }
 
-  /// Optimize the Gaussian Factor Graph with a initial estimate
+  /// Optimize the function `||f(x)||^2`, using CGLS, starting at `initial`.
+  ///
   /// Reference: Bjorck96book_numerical-methods-for-least-squares-problems
   /// Page 289, Algorithm 7.4.1
-  public func optimize(
-    objective: Objective,
-    initial: inout Objective.Variables
-  ) {
+  public func optimize(_ f: F, initial: inout F.Input) {
     step += 1
 
-    var x: Objective.Variables = initial // x(0), the initial value
-    var r: Objective.Residuals = objective.residuals(at: x) // r(0) = b - A * x(0), the residual
-    var p = objective.productATranspose(times: r) // p(0) = s(0) = A^T * r(0), residual in value space
+    var x: F.Input = initial // x(0), the initial value
+    var r: F.Output = f(x).scaled(by: -1) // r(0) = b - A * x(0), the residual
+    var p = f.applyLinearAdjoint(r) // p(0) = s(0) = A^T * r(0), residual in value space
     var s = p // residual of normal equations
     var gamma = s.squaredNorm // γ(0) = ||s(0)||^2
 
     while step < max_iteration {
-      let q = objective.productA(times: p) // q(k) = A * p(k)
+      let q = f.applyLinearForward(p) // q(k) = A * p(k)
       let alpha: Double = gamma / q.squaredNorm // α(k) = γ(k)/||q(k)||^2
       x = x + p.scaled(by: alpha) // x(k+1) = x(k) + α(k) * p(k)
       r = r + q.scaled(by: -alpha) // r(k+1) = r(k) - α(k) * q(k)
-      s = objective.productATranspose(times: r) // s(k+1) = A.T * r(k+1)
+      s = f.applyLinearAdjoint(r) // s(k+1) = A.T * r(k+1)
 
       let gamma_next = s.squaredNorm // γ(k+1) = ||s(k+1)||^2
       let beta: Double = gamma_next/gamma // β(k) = γ(k+1)/γ(k)
diff --git a/Tests/SwiftFusionTests/Geometry/Pose3Tests.swift 
b/Tests/SwiftFusionTests/Geometry/Pose3Tests.swift index 0c4495e2..4613bff9 100644 --- a/Tests/SwiftFusionTests/Geometry/Pose3Tests.swift +++ b/Tests/SwiftFusionTests/Geometry/Pose3Tests.swift @@ -120,7 +120,7 @@ final class Pose3Tests: XCTestCase { dx.insert(i, Vector(zeros: 6)) } - optimizer.optimize(objective: gfg, initial: &dx) + optimizer.optimize(gfg, initial: &dx) val.move(along: dx) } diff --git a/Tests/SwiftFusionTests/Inference/NonlinearFactorGraphTests.swift b/Tests/SwiftFusionTests/Inference/NonlinearFactorGraphTests.swift index ea9a8d7f..ab3fa923 100644 --- a/Tests/SwiftFusionTests/Inference/NonlinearFactorGraphTests.swift +++ b/Tests/SwiftFusionTests/Inference/NonlinearFactorGraphTests.swift @@ -65,7 +65,7 @@ final class NonlinearFactorGraphTests: XCTestCase { dx.insert(i, Vector(zeros: 3)) } - optimizer.optimize(objective: gfg, initial: &dx) + optimizer.optimize(gfg, initial: &dx) val.move(along: dx) } diff --git a/Tests/SwiftFusionTests/Optimizers/CGLSTests.swift b/Tests/SwiftFusionTests/Optimizers/CGLSTests.swift index 15b112fe..16661a52 100644 --- a/Tests/SwiftFusionTests/Optimizers/CGLSTests.swift +++ b/Tests/SwiftFusionTests/Optimizers/CGLSTests.swift @@ -11,7 +11,7 @@ final class CGLSTests: XCTestCase { let optimizer = CGLS(precision: 1e-7, max_iteration: 10) var x: VectorValues = SimpleGaussianFactorGraph.zeroDelta() - optimizer.optimize(objective: gfg, initial: &x) + optimizer.optimize(gfg, initial: &x) let expected = SimpleGaussianFactorGraph.correctDelta()