22 changes: 22 additions & 0 deletions Sources/SwiftFusion/Core/MathUtil.swift
@@ -0,0 +1,22 @@
//===----------------------------------------------------------------------===//
// Pseudo inverse
//===----------------------------------------------------------------------===//

import TensorFlow

public func pinv(_ m: Tensor<Double>) -> Tensor<Double> {
Member: Document - even when name is obvious. Say what it will compute in different cases.

// Moore-Penrose pseudo-inverse via the full SVD: for m = U * S * V^T,
// pinv(m) = V * S+ * U^T, where S+ holds the reciprocals of the singular
// values and has the transposed shape of m.
// Note: this assumes all singular values are nonzero; a rank-deficient
// input will produce infinities in the result.
let (J_s, J_u, J_v) = m.svd(computeUV: true, fullMatrices: true)

// `m` below shadows the input parameter; from here on `m` and `n` are the
// column and row counts of the input matrix.
let m = J_v!.shape[1]
let n = J_u!.shape[0]
if (m > n) {
// More columns than rows: pad S+ with zero rows so it is m x n.
let J_ss = J_s.reciprocal.diagonal().concatenated(with: Tensor<Double>(repeating: 0, shape: [m-n, n]), alongAxis: 0)
return matmul(matmul(J_v!, J_ss), J_u!.transposed())
} else if (m < n) {
// More rows than columns: pad S+ with zero columns so it is m x n.
let J_ss = J_s.reciprocal.diagonal().concatenated(with: Tensor<Double>(repeating: 0, shape: [m, n-m]), alongAxis: 1)
return matmul(matmul(J_v!, J_ss), J_u!.transposed())
} else {
// Square input: S+ is just the diagonal of reciprocal singular values.
let J_ss = J_s.reciprocal.diagonal()
return matmul(matmul(J_v!, J_ss), J_u!.transposed())
}
}
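
As a quick illustration (not part of the diff), here is a minimal sketch of calling pinv on a tall matrix; it only relies on the TensorFlow-for-Swift Tensor API imported above, and the values are illustrative:

// Minimal usage sketch for pinv (illustrative only).
let a: Tensor<Double> = [[1.0, 0.0],
                         [0.0, 1.0],
                         [1.0, 1.0]]      // 3x2, full column rank
let aPinv = pinv(a)                       // 2x3 left inverse
print(matmul(aPinv, a))                   // approximately the 2x2 identity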
8 changes: 8 additions & 0 deletions Sources/SwiftFusion/Geometry/File.swift
@@ -0,0 +1,8 @@
//
// File.swift
//
//
// Created by Fan Jiang on 2020/4/24.
//

import Foundation
97 changes: 97 additions & 0 deletions Sources/SwiftFusion/Inference/BetweenFactor.swift
@@ -0,0 +1,97 @@
// Copyright 2019 The SwiftFusion Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import TensorFlow

/// A `NonlinearFactor` that measures the difference between two values of the same type
///
/// Input is a dictionary of `Key` to `Value` pairs, and the output is the scalar
/// error value
///
/// Interpretation
/// ================
/// `Input`: the input values as key-value pairs
///
public struct BetweenFactor: NonlinearFactor {

var key1: Int
var key2: Int
@noDerivative
public var keys: Array<Int> {
get {
[key1, key2]
}
}
public var difference: Pose2
public typealias Output = Error

public init (_ key1: Int, _ key2: Int, _ difference: Pose2) {
self.key1 = key1
self.key2 = key2
self.difference = difference
}
typealias ScalarType = Double

/// TODO: `Dictionary` still does not conform to `Differentiable`
/// Tracking issue: https://bugs.swift.org/browse/TF-899
// typealias Input = Dictionary<UInt, Tensor<ScalarType>>

// I want to build a general differentiable dot product
// @differentiable(wrt: (a, b))
// static func dot<T: Differentiable & KeyPathIterable>(_ a: T, _ b: T) -> Double {
// let squared = a.recursivelyAllKeyPaths(to: Double.self).map { a[keyPath: $0] * b[keyPath: $0] }
//
// return squared.differentiableReduce(0.0, {$0 + $1})
// }
//
// @derivative(of: dot)
// static func _vjpDot<T: Differentiable & KeyPathIterable>(_ a: T, _ b: T) -> (
// value: Double,
// pullback: (Double) -> (T.TangentVector, T.TangentVector)
// ) {
// return (value: dot(a, b), pullback: { v in
// ((at.scaled(by: v), bt.scaled(by: v)))
// })
// }

/// Returns the `error` of the factor.
Member: In GTSAM this is decided by the noise model - we should do the same here I think to allow for robust error models.

Collaborator (author): I am thinking about adding the NoiseModel later after we got polymorphic BetweenFactor working...

@differentiable(wrt: values)
public func error(_ values: Values) -> Double {
let error = between(
between(values[key2].baseAs(Pose2.self), values[key1].baseAs(Pose2.self)),
difference
)

return error.t.norm + error.rot.theta * error.rot.theta
}

@differentiable(wrt: values)
public func errorVector(_ values: Values) -> Vector3 {
let error = between(
between(values[key2].baseAs(Pose2.self), values[key1].baseAs(Pose2.self)),
difference
)

return Vector3(error.rot.theta, error.t.x, error.t.y)
Collaborator: This should actually return something using local coordinates (between(values[key1].baseAs(Pose2.self), values[key2].baseAs(Pose2.self).localCoordinates(around: difference)), right? If so, you could add a TODO and a github issue for this, because this seems to work for now.

Collaborator (author): Correct, this is on my TODO :) Will change shortly...

}

public func linearize(_ values: Values) -> JacobianFactor {
Collaborator: I think it's very likely that we can write a default generic linearize function that automatically does this for all NonlinearFactors, so that we don't have to repeat this every time we define a factor. I have to run to a meeting soon so I don't have time to figure it out now, but I'll follow up later.

Collaborator (author): Nice!

Collaborator: Done in #49!

let j = jacobian(of: self.errorVector, at: values)

// Stack, per output component, the gradient w.r.t. each variable into a 3x3 Jacobian block.
let j1 = Tensor<Double>(stacking: (0..<3).map { i in (j[i]._values[values._indices[key1]!].base as! Pose2.TangentVector).tensor.reshaped(to: TensorShape([3])) })
let j2 = Tensor<Double>(stacking: (0..<3).map { i in (j[i]._values[values._indices[key2]!].base as! Pose2.TangentVector).tensor.reshaped(to: TensorShape([3])) })

// TODO: remove this negative sign
return JacobianFactor(keys, [j1, j2], -errorVector(values).tensor.reshaped(to: [3, 1]))
}
}
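
To make the intended usage concrete, here is a rough sketch (not part of this PR) of evaluating a BetweenFactor. It assumes, hypothetically, that `Values` can be default-constructed and populated with an `insert(_:_:)` method and that `Pose2` offers an `(x, y, theta)`-style initializer; the actual SwiftFusion API may differ.

// Rough sketch only; `Values.insert(_:_:)` and `Pose2(x, y, theta)` are assumed here.
var values = Values()
values.insert(0, Pose2(0.0, 0.0, 0.0))   // pose for key 0
values.insert(1, Pose2(1.0, 0.0, 0.0))   // pose for key 1

// The factor encodes a measured relative pose between key 0 and key 1.
let factor = BetweenFactor(0, 1, Pose2(1.0, 0.0, 0.0))

// Scalar error and 3-vector error at the current estimate; both are (near) zero
// when the poses exactly satisfy the measurement under the library's `between` convention.
print(factor.error(values))
print(factor.errorVector(values))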
26 changes: 24 additions & 2 deletions Sources/SwiftFusion/Inference/Factor.swift
@@ -15,7 +15,7 @@ import TensorFlow

/// The most general factor protocol.
public protocol Factor {
var keys: Array<Int> { get set }
var keys: Array<Int> { get }
Collaborator: Is there any place where you need to get the keys from a Factor? (Of course, the implementations of the factors need to use their keys, but do any users of factors need to get the keys?) If not, you could simplify the Factor API by completely removing the keys requirement. And if you can remove keys, then you can also simplify the protocol hierarchy by completely removing Factor.

Collaborator (author): I think we still have this in some of the algorithms for iterating through the keys to look up the Values, but that may change if we have a better encapsulated API.

Member: We will need it for direct solvers, to decide on the ordering.

}

/// A `LinearFactor` corresponds to the `GaussianFactor` in GTSAM.
@@ -30,11 +30,33 @@ public protocol Factor {
public protocol LinearFactor: Factor {
typealias ScalarType = Double

/// TODO: `Dictionary` still does not conform to `Differentiable`
/// Tracking issue: https://bugs.swift.org/browse/TF-899
// typealias Input = Dictionary<UInt, Tensor<ScalarType>>

/// Returns the `error` of the factor.
func error(_ values: VectorValues) -> ScalarType
}

/// A `NonlinearFactor` corresponds to the `NonlinearFactor` in GTSAM.
///
/// Input is a dictionary of `Key` to `Value` pairs, and the output is the scalar
/// error value
///
/// Interpretation
/// ================
/// `Input`: the input values as key-value pairs
///
public protocol NonlinearFactor: Factor {
typealias ScalarType = Double

/// TODO: `Dictionary` still does not conform to `Differentiable`
/// Tracking issue: https://bugs.swift.org/browse/TF-899
// typealias Input = Dictionary<UInt, Tensor<ScalarType>>

/// Returns the `error` of the factor.
@differentiable(wrt: values)
func error(_ indices: [Int], values: Tensor<ScalarType>) -> ScalarType
func error(_ values: Values) -> ScalarType

func linearize(_ values: Values) -> JacobianFactor
}
33 changes: 0 additions & 33 deletions Sources/SwiftFusion/Inference/FactorGraph.swift

This file was deleted.

9 changes: 6 additions & 3 deletions Sources/SwiftFusion/Inference/GaussianFactorGraph.swift
@@ -16,7 +16,7 @@ import TensorFlow
/// A factor graph for linear problems
/// Factors are the Jacobians between the corresponding variables and measurements
/// TODO(fan): Add noise model
public struct GaussianFactorGraph: FactorGraph {
public struct GaussianFactorGraph {
public typealias KeysType = Array<Int>

public typealias FactorsType = Array<JacobianFactor>
@@ -34,11 +34,15 @@ public struct GaussianFactorGraph: FactorGraph {
public init() { }

/// This calculates `A*x`, where x is the collection of key-values
/// Note: `A` is the matrix formed by stacking the Jacobians of all the factors
public static func * (lhs: GaussianFactorGraph, rhs: VectorValues) -> Errors {
Array(lhs.factors.map { $0 * rhs })
}

/// This calculates `A*x - b`, where x is the collection of key-values
public func residual (_ val: VectorValues) -> Errors {
Array(self.factors.map { $0 * val - $0.b })
}

/// Convenience operator for adding factor
public static func += (lhs: inout Self, rhs: JacobianFactor) {
lhs.factors.append(rhs)
@@ -50,7 +54,6 @@ public struct GaussianFactorGraph: FactorGraph {
for i in r.indices {
let JTr = factors[i].atr(r[i])

print("JTr = \(JTr)")
vv = vv + JTr
}

5 changes: 3 additions & 2 deletions Sources/SwiftFusion/Inference/JacobianFactor.swift
@@ -39,10 +39,11 @@ import TensorFlow
/// and `HessianFactor` conform to this protocol instead.
public struct JacobianFactor: LinearFactor {

@differentiable(wrt: values)
public func error(_ indices: [Int], values: Tensor<ScalarType>) -> ScalarType {
// TODO(fan): correct this and add a unit test
public func error(_ values: VectorValues) -> ScalarType {
ScalarType.zero
}

public var dimension: Int {
get {
jacobians[0].shape.dimensions[0]
49 changes: 49 additions & 0 deletions Sources/SwiftFusion/Inference/NonlinearFactorGraph.swift
@@ -0,0 +1,49 @@
// Copyright 2019 The SwiftFusion Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import TensorFlow

/// A factor graph for nonlinear problems
/// TODO(fan): Add noise model
public struct NonlinearFactorGraph {
public typealias KeysType = Array<Int>

public typealias FactorsType = Array<NonlinearFactor>

public var keys: KeysType = []
public var factors: FactorsType = []

/// Default initializer
public init() { }

/// Convenience operator for adding factor
public static func += (lhs: inout Self, rhs: NonlinearFactor) {
lhs.factors.append(rhs)
}

/// Linearizes the nonlinear factor graph into a linear (Gaussian) factor graph
public func linearize(_ values: Values) -> GaussianFactorGraph {
var gfg = GaussianFactorGraph()

for i in factors {
let linearized = i.linearize(values)

// Assertion for the shape of Jacobian
assert(linearized.jacobians.map { $0.shape.count == 2 }.reduce(true, { $0 && $1 }))

gfg += linearized
}

return gfg
}
}
69 changes: 69 additions & 0 deletions Sources/SwiftFusion/Inference/PriorFactor.swift
@@ -0,0 +1,69 @@
// Copyright 2019 The SwiftFusion Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import TensorFlow

/// A `NonlinearFactor` that measures the difference between a value and its desired (prior) value
///
/// Input is a dictionary of `Key` to `Value` pairs, and the output is the scalar
/// error value
///
/// Interpretation
/// ================
/// `Input`: the input values as key-value pairs
///
public struct PriorFactor: NonlinearFactor {
@noDerivative
public var keys: Array<Int> = []
public var difference: Pose2
public typealias Output = Error

public init (_ key: Int, _ difference: Pose2) {
keys = [key]
self.difference = difference
}
typealias ScalarType = Double

/// TODO: `Dictionary` still does not conform to `Differentiable`
/// Tracking issue: https://bugs.swift.org/browse/TF-899
// typealias Input = Dictionary<UInt, Tensor<ScalarType>>

/// Returns the `error` of the factor.
@differentiable(wrt: values)
public func error(_ values: Values) -> Double {
let error = between(
values[keys[0]].baseAs(Pose2.self),
difference
)

return error.t.norm + error.rot.theta * error.rot.theta
}

@differentiable(wrt: values)
public func errorVector(_ values: Values) -> Vector3 {
let error = between(
values[keys[0]].baseAs(Pose2.self),
difference
)

return Vector3(error.rot.theta, error.t.x, error.t.y)
}

public func linearize(_ values: Values) -> JacobianFactor {
let j = jacobian(of: self.errorVector, at: values)

let j1 = Tensor<Double>(stacking: (0..<3).map { i in (j[i]._values[0].base as! Pose2.TangentVector).tensor.reshaped(to: TensorShape([3])) })

return JacobianFactor(keys, [j1], -errorVector(values).tensor.reshaped(to: [3, 1]))
}
}
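
Putting the pieces together, here is a rough end-to-end sketch (again not part of the PR) of building a small nonlinear factor graph and linearizing it, with the same assumed `Values.insert(_:_:)` and `Pose2(x, y, theta)` initializer as in the BetweenFactor sketch above:

// Rough sketch; Values.insert(_:_:) and Pose2(x, y, theta) are assumptions.
var values = Values()
values.insert(0, Pose2(0.0, 0.0, 0.0))
values.insert(1, Pose2(1.1, 0.1, 0.0))   // slightly perturbed initial guess

var graph = NonlinearFactorGraph()
graph += PriorFactor(0, Pose2(0.0, 0.0, 0.0))        // anchor key 0 at the origin
graph += BetweenFactor(0, 1, Pose2(1.0, 0.0, 0.0))   // odometry-style measurement

// Linearize around the current estimate; every factor contributes one JacobianFactor.
let gfg = graph.linearize(values)
print(gfg.factors.count)   // 2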