Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 2 additions & 6 deletions Examples/Pose2SLAMG2O/main.swift
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ struct G2OFactorGraph: G2OReader {
var graph: NonlinearFactorGraph = NonlinearFactorGraph()

public mutating func addInitialGuess(index: Int, pose: Pose2) {
initialGuess.insert(index, AnyDifferentiable(pose))
initialGuess.insert(index, pose)
}

public mutating func addMeasurement(frameIndex: Int, measuredIndex: Int, pose: Pose2) {
Expand Down Expand Up @@ -72,11 +72,7 @@ func main() {
dx.insert(i, Vector(zeros: 3))
}
optimizer.optimize(gfg: gfg, initial: &dx)
for i in 0..<val.count {
var p = val[i].baseAs(Pose2.self)
p.move(along: Vector3(dx[i]))
val[i] = AnyDifferentiable(p)
}
val.move(along: dx)
print("Current error: \(problem.graph.error(val))")
}
print("Final error: \(problem.graph.error(val))")
Expand Down
99 changes: 95 additions & 4 deletions Sources/SwiftFusion/Geometry/Rot3.swift
Original file line number Diff line number Diff line change
Expand Up @@ -88,9 +88,47 @@ public extension Matrix3Coordinate {
init(_ r11 : Double, _ r12 : Double, _ r13 : Double,
_ r21 : Double, _ r22 : Double, _ r23 : Double,
_ r31 : Double, _ r32 : Double, _ r33 : Double) {
R = Tensor<Double>(shape: [3,3], scalars: [r11, r12, r13,
r21, r22, r23,
r31, r32, r33])
R = matrixTensor(
r11, r12, r13,
r21, r22, r23,
r31, r32, r33
)
}

/// Derivative of the above `init`.
///
/// The pullback reads the nine partial derivatives back out of the
/// cotangent's `R` tensor in row-major order, matching the scalar layout
/// used by the initializer.
@derivative(of: init)
static func vjpInit(
_ r11 : Double, _ r12 : Double, _ r13 : Double,
_ r21 : Double, _ r22 : Double, _ r23 : Double,
_ r31 : Double, _ r32 : Double, _ r33 : Double
) -> (
value: Matrix3Coordinate,
pullback: (TangentVector) -> (
Double, Double, Double,
Double, Double, Double,
Double, Double, Double
)
) {
let value = Matrix3Coordinate(
r11, r12, r13,
r21, r22, r23,
r31, r32, r33
)
// Unpacks the cotangent's 3x3 tensor into the nine scalar partials.
func extractPartials(_ cotangent: TangentVector) -> (
Double, Double, Double,
Double, Double, Double,
Double, Double, Double
) {
let p = cotangent.R.scalars
return (p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7], p[8])
}
return (value: value, pullback: extractPartials)
}

/// Product of two rotations.
Expand Down Expand Up @@ -125,7 +163,7 @@ extension Matrix3Coordinate: ManifoldCoordinate {
let theta2 = local.squaredNorm
let nearZero = theta2 <= .ulpOfOne
let (wx, wy, wz) = (local.x, local.y, local.z)
let W = Tensor<Double>(shape: [3, 3], scalars: [0.0, -wz, wy, wz, 0.0, -wx, -wy, wx, 0.0])
let W = matrixTensor(0.0, -wz, wy, wz, 0.0, -wx, -wy, wx, 0.0)
let I_3x3: Tensor<Double> = eye(rowCount: 3)
if !nearZero {
let theta = sqrtWrap(theta2)
Expand Down Expand Up @@ -183,3 +221,56 @@ extension Matrix3Coordinate: ManifoldCoordinate {
R = tensor
}
}

/// Returns a 3x3 matrix tensor whose entries are the given scalars, supplied
/// in row-major order.
// TODO: This is a workaround for the problem mentioned in
// https://github.com/apple/swift/pull/31723. When that fix is available, we can delete the
// custom derivative of this function, and inline the function into its callsites.
@differentiable
fileprivate func matrixTensor(
_ r11 : Double, _ r12 : Double, _ r13 : Double,
_ r21 : Double, _ r22 : Double, _ r23 : Double,
_ r31 : Double, _ r32 : Double, _ r33 : Double
) -> Tensor<Double> {
// Row-major flattening; the shape [3, 3] reassembles it into a matrix.
let entries: [Double] = [
r11, r12, r13,
r21, r22, r23,
r31, r32, r33,
]
return Tensor<Double>(shape: [3, 3], scalars: entries)
}

/// Derivative of `matrixTensor`.
///
/// The pullback reads the nine partials back out of the cotangent tensor in
/// row-major order, mirroring the layout `matrixTensor` writes.
// This works around a problem with differentiating array literals:
// https://github.com/apple/swift/pull/31723
@derivative(of: matrixTensor)
fileprivate func vjpMatrixTensor(
_ r11 : Double, _ r12 : Double, _ r13 : Double,
_ r21 : Double, _ r22 : Double, _ r23 : Double,
_ r31 : Double, _ r32 : Double, _ r33 : Double
) -> (
value: Tensor<Double>,
pullback: (Tensor<Double>) -> (
Double, Double, Double,
Double, Double, Double,
Double, Double, Double
)
) {
let value = matrixTensor(
r11, r12, r13,
r21, r22, r23,
r31, r32, r33
)
// Unpacks the cotangent tensor's scalars into the nine partials.
func extractPartials(_ cotangent: Tensor<Double>) -> (
Double, Double, Double,
Double, Double, Double,
Double, Double, Double
) {
let p = cotangent.scalars
return (p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7], p[8])
}
return (value: value, pullback: extractPartials)
}
16 changes: 6 additions & 10 deletions Sources/SwiftFusion/Inference/BetweenFactor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ import TensorFlow
/// ================
/// `Input`: the input values as key-value pairs
///
public struct BetweenFactor<T: LieGroup>: NonlinearFactor where T.Coordinate.LocalCoordinate: VectorConvertible {
public struct BetweenFactor<T: LieGroup>: NonlinearFactor
where T.TangentVector: VectorConvertible, T.TangentVector == T.Coordinate.LocalCoordinate
{

var key1: Int
var key2: Int
Expand Down Expand Up @@ -67,7 +69,7 @@ public struct BetweenFactor<T: LieGroup>: NonlinearFactor where T.Coordinate.Loc
/// Returns the `error` of the factor.
@differentiable(wrt: values)
public func error(_ values: Values) -> Double {
let actual = values[key1].baseAs(T.self).inverse() * values[key2].baseAs(T.self)
let actual = values[key1, as: T.self].inverse() * values[key2, as: T.self]
let error = difference.localCoordinate(actual)
// TODO: It would be faster to call `error.squaredNorm` because then we don't have to pay
// the cost of a conversion to `Vector`. To do this, we need a protocol
Expand All @@ -78,19 +80,13 @@ public struct BetweenFactor<T: LieGroup>: NonlinearFactor where T.Coordinate.Loc
@differentiable(wrt: values)
public func errorVector(_ values: Values) -> T.Coordinate.LocalCoordinate {
let error = difference.localCoordinate(
values[key1].baseAs(T.self).inverse() * values[key2].baseAs(T.self)
values[key1, as: T.self].inverse() * values[key2, as: T.self]
)

return error
}

public func linearize(_ values: Values) -> JacobianFactor {
let j = jacobian(of: self.errorVector, at: values)

let j1 = Matrix(stacking: (0..<j.count).map { i in (j[i]._values[values._indices[key1]!].base as! T.Coordinate.LocalCoordinate).vector } )
let j2 = Matrix(stacking: (0..<j.count).map { i in (j[i]._values[values._indices[key2]!].base as! T.Coordinate.LocalCoordinate).vector } )

// TODO: remove this negative sign
return JacobianFactor(keys, [j1, j2], errorVector(values).vector.scaled(by: -1))
return JacobianFactor(of: self.errorVector, at: values)
}
}
37 changes: 37 additions & 0 deletions Sources/SwiftFusion/Inference/JacobianFactor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -104,3 +104,40 @@ public struct JacobianFactor: LinearFactor {
return result
}
}

extension JacobianFactor {
/// Creates a `JacobianFactor` by linearizing the error function `f` at `p`.
///
/// Each standard basis vector of `R`'s tangent space is pulled back through
/// `f` to obtain one row of the jacobian; the rows are then grouped by
/// variable key into one matrix per variable.
public init<R: VectorConvertible & TangentStandardBasis>(
of f: @differentiable (Values) -> R,
at p: Values
) {
// Compute the rows of the jacobian.
// `pb` maps a cotangent in `R`'s tangent space to the gradient with
// respect to every variable in `p`.
let (value, pb) = valueWithPullback(at: p, in: f)
let rows = R.tangentStandardBasis.map { pb($0) }

// Construct empty matrices with the correct shape.
// NOTE(review): assumes every row shares the key set of `rows[0]` — the
// pullback of each basis vector should touch the same variables; confirm.
assert(rows.count > 0)
var matrices = Dictionary<Int, Matrix>(uniqueKeysWithValues: rows[0].keys.map { key in
let row = rows[0][key]
// Pre-size each per-variable matrix: `rows.count` rows of
// `row.dimension` columns.
var matrix = Matrix([], rowCount: 0, columnCount: row.dimension)
matrix.reserveCapacity(rows.count * row.dimension)
return (key, matrix)
})

// Fill in the matrix entries.
for row in rows {
for key in row.keys {
matrices[key]!.append(row: row[key])
}
}

// Return the jacobian factor with the matrices and value.
// `orderedKeys` is in unspecified dictionary order, but the key list and
// the matrix list passed below are both derived from it, so the pairing
// between keys and jacobian blocks stays consistent.
let orderedKeys = Array(matrices.keys)
self = JacobianFactor(
orderedKeys,
orderedKeys.map { matrices[$0]! },
// TODO: remove this negative sign
value.vector.scaled(by: -1)
)
}
}
14 changes: 7 additions & 7 deletions Sources/SwiftFusion/Inference/PriorFactor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ import TensorFlow
/// ================
/// `Input`: the input values as key-value pairs
///
public struct PriorFactor<T: LieGroup>: NonlinearFactor where T.Coordinate.LocalCoordinate: VectorConvertible {
public struct PriorFactor<T: LieGroup>: NonlinearFactor
where T.TangentVector: VectorConvertible, T.TangentVector == T.Coordinate.LocalCoordinate
{
@noDerivative
public var keys: Array<Int> = []
public var difference: T
Expand All @@ -41,7 +43,7 @@ public struct PriorFactor<T: LieGroup>: NonlinearFactor where T.Coordinate.Local
/// Returns the `error` of the factor.
@differentiable(wrt: values)
public func error(_ values: Values) -> Double {
let error = difference.localCoordinate(values[keys[0]].baseAs(T.self))
let error = difference.localCoordinate(values[keys[0], as: T.self])
// TODO: It would be faster to call `error.squaredNorm` because then we don't have to pay
// the cost of a conversion to `Vector`. To do this, we need a protocol
// with a `squaredNorm` requirement.
Expand All @@ -50,14 +52,12 @@ public struct PriorFactor<T: LieGroup>: NonlinearFactor where T.Coordinate.Local

@differentiable(wrt: values)
public func errorVector(_ values: Values) -> T.Coordinate.LocalCoordinate {
let val = values[keys[0]].baseAs(T.self)
let val = values[keys[0], as: T.self]
let error = difference.localCoordinate(val)
return error
}

public func linearize(_ values: Values) -> JacobianFactor {
let j = jacobian(of: self.errorVector, at: values)
let j1 = Matrix(stacking: (0..<j.count).map { i in (j[i]._values[values._indices[keys[0]]!].base as! T.Coordinate.LocalCoordinate).vector } )
return JacobianFactor(keys, [j1], errorVector(values).vector.scaled(by: -1))
return JacobianFactor(of: self.errorVector, at: values)
}
}
57 changes: 48 additions & 9 deletions Sources/SwiftFusion/Inference/Values.swift
Original file line number Diff line number Diff line change
Expand Up @@ -34,25 +34,64 @@ public struct Values: Differentiable & KeyPathIterable {
public var count: Int {
return _values.count
}

/// MARK: - Differentiable conformance and related properties and helpers.

/// The product space of the tangent spaces of all the values.
public typealias TangentVector = VectorValues

/// `makeTangentVector[i]` produces a type-erased tangent vector for `values[i]`.
private var makeTangentVector: [(Vector) -> AnyDerivative] = []

/// Moves each stored value along the corresponding entry of `direction`.
///
/// Values whose key is absent from `direction` are left unchanged. The
/// type-erased tangent for each value is rebuilt from the flat `Vector` via
/// the `makeTangentVector` closure recorded when the value was inserted.
public mutating func move(along direction: VectorValues) {
for key in direction.keys {
// Force-unwrap: a `direction` key with no matching value in `self` is a
// programmer error.
let index = self._indices[key]!
self._values[index].move(along: makeTangentVector[index](direction[key]))
}
}

/// The subscript operator, with some indirection
/// Should be replaced after Dictionary is in
/// MARK: - Value manipulation methods.

/// Access the value at `key`, with type `type`.
///
/// Precondition: The value actually has type `type`.
@differentiable
public subscript(key: Int) -> AnyDifferentiable {
public subscript<T: Differentiable>(key: Int, as type: T.Type) -> T
where T.TangentVector: VectorConvertible
{
get {
_values[_indices[key]!]
return _values[_indices[key]!].baseAs(type)
}
set(newVal) {
_values[_indices[key]!] = newVal
set(newValue) {
_values[_indices[key]!] = AnyDifferentiable(newValue)
}
}


/// Derivative of the `subscript` getter.
///
/// The pullback embeds the cotangent of the single accessed value into an
/// otherwise-empty `VectorValues` under the same `key`.
@derivative(of: subscript)
@usableFromInline
func vjpSubscript<T: Differentiable>(key: Int, as type: T.Type)
-> (value: T, pullback: (T.TangentVector) -> VectorValues)
where T.TangentVector: VectorConvertible
{
return (
self._values[self._indices[key]!].baseAs(type),
{ (t: T.TangentVector) in
// Flatten the typed tangent into the `Vector` representation that
// `VectorValues` stores.
var vectorValues = VectorValues()
vectorValues.insert(key, t.vector)
return vectorValues
}
)
}

/// Insert a key value pair
public mutating func insert(_ key: Int, _ val: AnyDifferentiable) {
public mutating func insert<T: Differentiable>(_ key: Int, _ val: T)
where T.TangentVector: VectorConvertible
{
assert(_indices[key] == nil)

self._indices[key] = self._values.count
self._values.append(val)
self._values.append(AnyDifferentiable(val))
self.makeTangentVector.append({ AnyDerivative(T.TangentVector($0)) })
}

}
Expand Down
29 changes: 12 additions & 17 deletions Tests/SwiftFusionTests/Geometry/Pose3Tests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ final class Pose3Tests: XCTestCase {
let prior_factor = PriorFactor(0, t1)

var vals = Values()
vals.insert(0, AnyDifferentiable(t1)) // should be identity matrix
vals.insert(0, t1) // should be identity matrix
// Change this to t2, still zero in upper left block

let actual = prior_factor.linearize(vals).jacobians[0]
Expand Down Expand Up @@ -74,7 +74,7 @@ final class Pose3Tests: XCTestCase {
let gti = Vector3(radius * cos(theta), radius * sin(theta), 0)
let oRi = Rot3.fromTangent(Vector3(0, 0, -theta)) // negative yaw goes counterclockwise, with Z down !
let gTi = Pose3(gRo * oRi, gti)
values.insert(key, AnyDifferentiable(gTi))
values.insert(key, gTi)
theta = theta + dtheta
}
return values
Expand All @@ -83,8 +83,8 @@ final class Pose3Tests: XCTestCase {
func testGtsamPose3SLAMExample() {
// Create a hexagon of poses
let hexagon = circlePose3(numPoses: 6, radius: 1.0)
let p0 = hexagon[0].baseAs(Pose3.self)
let p1 = hexagon[1].baseAs(Pose3.self)
let p0 = hexagon[0, as: Pose3.self]
let p1 = hexagon[1, as: Pose3.self]

// create a Pose graph with one equality constraint and one measurement
var fg = NonlinearFactorGraph()
Expand All @@ -101,12 +101,12 @@ final class Pose3Tests: XCTestCase {
// Create initial config
var val = Values()
let s = 0.10
val.insert(0, AnyDifferentiable(p0))
val.insert(1, AnyDifferentiable(hexagon[1].baseAs(Pose3.self).retract(Vector6(s * Tensor<Double>(randomNormal: [6])))))
val.insert(2, AnyDifferentiable(hexagon[2].baseAs(Pose3.self).retract(Vector6(s * Tensor<Double>(randomNormal: [6])))))
val.insert(3, AnyDifferentiable(hexagon[3].baseAs(Pose3.self).retract(Vector6(s * Tensor<Double>(randomNormal: [6])))))
val.insert(4, AnyDifferentiable(hexagon[4].baseAs(Pose3.self).retract(Vector6(s * Tensor<Double>(randomNormal: [6])))))
val.insert(5, AnyDifferentiable(hexagon[5].baseAs(Pose3.self).retract(Vector6(s * Tensor<Double>(randomNormal: [6])))))
val.insert(0, p0)
val.insert(1, hexagon[1, as: Pose3.self].retract(Vector6(s * Tensor<Double>(randomNormal: [6]))))
val.insert(2, hexagon[2, as: Pose3.self].retract(Vector6(s * Tensor<Double>(randomNormal: [6]))))
val.insert(3, hexagon[3, as: Pose3.self].retract(Vector6(s * Tensor<Double>(randomNormal: [6]))))
val.insert(4, hexagon[4, as: Pose3.self].retract(Vector6(s * Tensor<Double>(randomNormal: [6]))))
val.insert(5, hexagon[5, as: Pose3.self].retract(Vector6(s * Tensor<Double>(randomNormal: [6]))))

// optimize
for _ in 0..<16 {
Expand All @@ -122,15 +122,10 @@ final class Pose3Tests: XCTestCase {

optimizer.optimize(gfg: gfg, initial: &dx)


for i in 0..<6 {
var p = val[i].baseAs(Pose3.self)
p.move(along: Vector6(dx[i]))
val[i] = AnyDifferentiable(p)
}
val.move(along: dx)
}

let pose_1 = val[1].baseAs(Pose3.self)
let pose_1 = val[1, as: Pose3.self]
assertAllKeyPathEqual(pose_1, p1, accuracy: 1e-2)
}
}
Loading